Diffstat (limited to 'crates')
65 files changed, 1278 insertions, 519 deletions
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml
index 3568da905..9aa77f72e 100644
--- a/crates/ra_db/Cargo.toml
+++ b/crates/ra_db/Cargo.toml
@@ -6,9 +6,10 @@ authors = ["Aleksey Kladov <[email protected]>"]
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | relative-path = "0.4.0" | 8 | relative-path = "0.4.0" |
9 | salsa = "0.10.0-alpha3" | 9 | salsa = "0.10.0-alpha4" |
10 | rustc-hash = "1.0" | 10 | rustc-hash = "1.0" |
11 | parking_lot = "0.7.0" | 11 | parking_lot = "0.7.0" |
12 | |||
12 | ra_arena = { path = "../ra_arena" } | 13 | ra_arena = { path = "../ra_arena" } |
13 | ra_syntax = { path = "../ra_syntax" } | 14 | ra_syntax = { path = "../ra_syntax" } |
14 | test_utils = { path = "../test_utils" } | 15 | test_utils = { path = "../test_utils" } |
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 7e13f70bc..6e17f33f0 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -20,7 +20,7 @@ pub use crate::{
20 | loc2id::LocationIntener, | 20 | loc2id::LocationIntener, |
21 | }; | 21 | }; |
22 | 22 | ||
23 | pub trait BaseDatabase: salsa::Database + panic::RefUnwindSafe { | 23 | pub trait CheckCanceled: salsa::Database + panic::RefUnwindSafe { |
24 | /// Aborts current query if there are pending changes. | 24 | /// Aborts current query if there are pending changes. |
25 | /// | 25 | /// |
26 | /// rust-analyzer needs to be able to answer semantic questions about the | 26 | /// rust-analyzer needs to be able to answer semantic questions about the |
@@ -63,11 +63,15 @@ pub struct FileRange { | |||
63 | pub range: TextRange, | 63 | pub range: TextRange, |
64 | } | 64 | } |
65 | 65 | ||
66 | #[salsa::query_group] | 66 | /// Database which stores all significant input facts: source code and project |
67 | pub trait FilesDatabase: salsa::Database { | 67 | /// model. Everything else in rust-analyzer is derived from these queries. |
68 | #[salsa::query_group(SourceDatabaseStorage)] | ||
69 | pub trait SourceDatabase: salsa::Database + CheckCanceled { | ||
68 | /// Text of the file. | 70 | /// Text of the file. |
69 | #[salsa::input] | 71 | #[salsa::input] |
70 | fn file_text(&self, file_id: FileId) -> Arc<String>; | 72 | fn file_text(&self, file_id: FileId) -> Arc<String>; |
73 | // Parses the file into the syntax tree. | ||
74 | fn parse(&self, file_id: FileId) -> TreeArc<SourceFile>; | ||
71 | /// Path to a file, relative to the root of its source root. | 75 | /// Path to a file, relative to the root of its source root. |
72 | #[salsa::input] | 76 | #[salsa::input] |
73 | fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; | 77 | fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; |
@@ -78,20 +82,12 @@ pub trait FilesDatabase: salsa::Database { | |||
78 | #[salsa::input] | 82 | #[salsa::input] |
79 | fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>; | 83 | fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>; |
80 | fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>; | 84 | fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>; |
81 | /// The set of "local" (that is, from the current workspace) roots. | ||
82 | /// Files in local roots are assumed to change frequently. | ||
83 | #[salsa::input] | ||
84 | fn local_roots(&self) -> Arc<Vec<SourceRootId>>; | ||
85 | /// The set of roots for crates.io libraries. | ||
86 | /// Files in libraries are assumed to never change. | ||
87 | #[salsa::input] | ||
88 | fn library_roots(&self) -> Arc<Vec<SourceRootId>>; | ||
89 | /// The crate graph. | 85 | /// The crate graph. |
90 | #[salsa::input] | 86 | #[salsa::input] |
91 | fn crate_graph(&self) -> Arc<CrateGraph>; | 87 | fn crate_graph(&self) -> Arc<CrateGraph>; |
92 | } | 88 | } |
93 | 89 | ||
94 | fn source_root_crates(db: &impl FilesDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> { | 90 | fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> { |
95 | let root = db.source_root(id); | 91 | let root = db.source_root(id); |
96 | let graph = db.crate_graph(); | 92 | let graph = db.crate_graph(); |
97 | let res = root | 93 | let res = root |
@@ -102,12 +98,7 @@ fn source_root_crates(db: &impl FilesDatabase, id: SourceRootId) -> Arc<Vec<Crat | |||
102 | Arc::new(res) | 98 | Arc::new(res) |
103 | } | 99 | } |
104 | 100 | ||
105 | #[salsa::query_group] | 101 | fn parse(db: &impl SourceDatabase, file_id: FileId) -> TreeArc<SourceFile> { |
106 | pub trait SyntaxDatabase: FilesDatabase + BaseDatabase { | ||
107 | fn source_file(&self, file_id: FileId) -> TreeArc<SourceFile>; | ||
108 | } | ||
109 | |||
110 | fn source_file(db: &impl SyntaxDatabase, file_id: FileId) -> TreeArc<SourceFile> { | ||
111 | let text = db.file_text(file_id); | 102 | let text = db.file_text(file_id); |
112 | SourceFile::parse(&*text) | 103 | SourceFile::parse(&*text) |
113 | } | 104 | } |
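Taken together, the ra_db hunks above fold the old FilesDatabase/SyntaxDatabase split into a single SourceDatabase query group and turn parsing into an ordinary derived query. The following sketch is reassembled from the new-side lines of this diff (a few more inputs, such as file_relative_path and source_root, are omitted, and the surrounding imports are assumed):

```rust
// Sketch only: reassembled from the new-side lines of the hunks above.
// Assumes the usual ra_db imports (Arc, TreeArc, SourceFile, FileId, CrateGraph).

/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: salsa::Database + CheckCanceled {
    /// Text of the file.
    #[salsa::input]
    fn file_text(&self, file_id: FileId) -> Arc<String>;
    /// Parses the file into the syntax tree.
    fn parse(&self, file_id: FileId) -> TreeArc<SourceFile>;
    /// The crate graph.
    #[salsa::input]
    fn crate_graph(&self) -> Arc<CrateGraph>;
}

fn parse(db: &impl SourceDatabase, file_id: FileId) -> TreeArc<SourceFile> {
    let text = db.file_text(file_id);
    SourceFile::parse(&*text)
}
```

Downstream crates now list SourceDatabaseStorage in their #[salsa::database(...)] attribute instead of naming the query traits directly, as the mock.rs and db.rs hunks further down show.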
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 3d802ade4..57a4b155b 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -12,7 +12,6 @@ rustc-hash = "1.0"
12 | parking_lot = "0.7.0" | 12 | parking_lot = "0.7.0" |
13 | ena = "0.11" | 13 | ena = "0.11" |
14 | join_to_string = "0.1.3" | 14 | join_to_string = "0.1.3" |
15 | salsa = "0.10.0-alpha3" | ||
16 | 15 | ||
17 | ra_syntax = { path = "../ra_syntax" } | 16 | ra_syntax = { path = "../ra_syntax" } |
18 | ra_arena = { path = "../ra_arena" } | 17 | ra_arena = { path = "../ra_arena" } |
diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs
index ec6a10353..22bbad964 100644
--- a/crates/ra_hir/src/adt.rs
+++ b/crates/ra_hir/src/adt.rs
@@ -11,7 +11,7 @@ use ra_syntax::{
11 | 11 | ||
12 | use crate::{ | 12 | use crate::{ |
13 | Name, AsName, Struct, Enum, EnumVariant, Crate, | 13 | Name, AsName, Struct, Enum, EnumVariant, Crate, |
14 | HirDatabase, HirFileId, | 14 | HirDatabase, HirFileId, StructField, FieldSource, |
15 | type_ref::TypeRef, | 15 | type_ref::TypeRef, |
16 | }; | 16 | }; |
17 | 17 | ||
@@ -150,7 +150,7 @@ impl VariantData { | |||
150 | impl VariantData { | 150 | impl VariantData { |
151 | fn new(flavor: StructFlavor) -> Self { | 151 | fn new(flavor: StructFlavor) -> Self { |
152 | let inner = match flavor { | 152 | let inner = match flavor { |
153 | StructFlavor::Tuple(fl) => { | 153 | ast::StructFlavor::Tuple(fl) => { |
154 | let fields = fl | 154 | let fields = fl |
155 | .fields() | 155 | .fields() |
156 | .enumerate() | 156 | .enumerate() |
@@ -161,7 +161,7 @@ impl VariantData { | |||
161 | .collect(); | 161 | .collect(); |
162 | VariantDataInner::Tuple(fields) | 162 | VariantDataInner::Tuple(fields) |
163 | } | 163 | } |
164 | StructFlavor::Named(fl) => { | 164 | ast::StructFlavor::Named(fl) => { |
165 | let fields = fl | 165 | let fields = fl |
166 | .fields() | 166 | .fields() |
167 | .map(|fd| StructFieldData { | 167 | .map(|fd| StructFieldData { |
@@ -171,8 +171,70 @@ impl VariantData { | |||
171 | .collect(); | 171 | .collect(); |
172 | VariantDataInner::Struct(fields) | 172 | VariantDataInner::Struct(fields) |
173 | } | 173 | } |
174 | StructFlavor::Unit => VariantDataInner::Unit, | 174 | ast::StructFlavor::Unit => VariantDataInner::Unit, |
175 | }; | 175 | }; |
176 | VariantData(inner) | 176 | VariantData(inner) |
177 | } | 177 | } |
178 | } | 178 | } |
179 | |||
180 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] | ||
181 | pub enum VariantDef { | ||
182 | Struct(Struct), | ||
183 | EnumVariant(EnumVariant), | ||
184 | } | ||
185 | impl_froms!(VariantDef: Struct, EnumVariant); | ||
186 | |||
187 | impl VariantDef { | ||
188 | pub(crate) fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> { | ||
189 | match self { | ||
190 | VariantDef::Struct(it) => it.field(db, name), | ||
191 | VariantDef::EnumVariant(it) => it.field(db, name), | ||
192 | } | ||
193 | } | ||
194 | pub(crate) fn variant_data(self, db: &impl HirDatabase) -> Arc<VariantData> { | ||
195 | match self { | ||
196 | VariantDef::Struct(it) => it.variant_data(db), | ||
197 | VariantDef::EnumVariant(it) => it.variant_data(db), | ||
198 | } | ||
199 | } | ||
200 | } | ||
201 | |||
202 | impl StructField { | ||
203 | pub(crate) fn source_impl(&self, db: &impl HirDatabase) -> (HirFileId, FieldSource) { | ||
204 | let var_data = self.parent.variant_data(db); | ||
205 | let fields = var_data.fields().unwrap(); | ||
206 | let ss; | ||
207 | let es; | ||
208 | let (file_id, struct_flavor) = match self.parent { | ||
209 | VariantDef::Struct(s) => { | ||
210 | let (file_id, source) = s.source(db); | ||
211 | ss = source; | ||
212 | (file_id, ss.flavor()) | ||
213 | } | ||
214 | VariantDef::EnumVariant(e) => { | ||
215 | let (file_id, source) = e.source(db); | ||
216 | es = source; | ||
217 | (file_id, es.flavor()) | ||
218 | } | ||
219 | }; | ||
220 | |||
221 | let field_sources = match struct_flavor { | ||
222 | ast::StructFlavor::Tuple(fl) => fl | ||
223 | .fields() | ||
224 | .map(|it| FieldSource::Pos(it.to_owned())) | ||
225 | .collect(), | ||
226 | ast::StructFlavor::Named(fl) => fl | ||
227 | .fields() | ||
228 | .map(|it| FieldSource::Named(it.to_owned())) | ||
229 | .collect(), | ||
230 | ast::StructFlavor::Unit => Vec::new(), | ||
231 | }; | ||
232 | let field = field_sources | ||
233 | .into_iter() | ||
234 | .zip(fields.iter()) | ||
235 | .find(|(_syntax, (id, _))| *id == self.id) | ||
236 | .unwrap() | ||
237 | .0; | ||
238 | (file_id, field) | ||
239 | } | ||
240 | } | ||
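The new StructField::source_impl above recovers a field's AST node by collecting the syntactic field sources in declaration order and zipping them with the arena-allocated field data, matching on the field id. A self-contained toy of that zip-and-find pattern (the types here are stand-ins for the sketch, not rust-analyzer's hir types):

```rust
// Toy illustration of the "zip sources with ids, find by id" lookup used in
// StructField::source_impl. FieldId/FieldSource here are made-up stand-ins.
#[derive(Clone, Copy, PartialEq, Debug)]
struct FieldId(u32);

#[derive(Debug)]
enum FieldSource {
    Named(&'static str),
    Pos(usize),
}

fn source_of(id: FieldId, sources: Vec<FieldSource>, fields: &[(FieldId, &str)]) -> FieldSource {
    sources
        .into_iter()
        .zip(fields.iter())
        .find(|(_source, (field_id, _data))| *field_id == id)
        .expect("field id comes from the same variant data, so it must be present")
        .0
}

fn main() {
    let sources = vec![FieldSource::Named("x"), FieldSource::Named("y")];
    let fields = [(FieldId(0), "x: u32"), (FieldId(1), "y: u32")];
    println!("{:?}", source_of(FieldId(1), sources, &fields)); // Named("y")
}
```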
diff --git a/crates/ra_hir/src/code_model_api.rs b/crates/ra_hir/src/code_model_api.rs
index 118562984..191104890 100644
--- a/crates/ra_hir/src/code_model_api.rs
+++ b/crates/ra_hir/src/code_model_api.rs
@@ -10,8 +10,8 @@ use crate::{
10 | nameres::{ModuleScope, lower::ImportId}, | 10 | nameres::{ModuleScope, lower::ImportId}, |
11 | db::HirDatabase, | 11 | db::HirDatabase, |
12 | expr::BodySyntaxMapping, | 12 | expr::BodySyntaxMapping, |
13 | ty::{InferenceResult, VariantDef}, | 13 | ty::InferenceResult, |
14 | adt::{EnumVariantId, StructFieldId}, | 14 | adt::{EnumVariantId, StructFieldId, VariantDef}, |
15 | generics::GenericParams, | 15 | generics::GenericParams, |
16 | docs::{Documentation, Docs, docs_from_ast}, | 16 | docs::{Documentation, Docs, docs_from_ast}, |
17 | module_tree::ModuleId, | 17 | module_tree::ModuleId, |
@@ -177,12 +177,25 @@ impl Module { | |||
177 | } | 177 | } |
178 | } | 178 | } |
179 | 179 | ||
180 | impl Docs for Module { | ||
181 | fn docs(&self, db: &impl HirDatabase) -> Option<Documentation> { | ||
182 | self.declaration_source(db) | ||
183 | .and_then(|it| docs_from_ast(&*it.1)) | ||
184 | } | ||
185 | } | ||
186 | |||
180 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 187 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
181 | pub struct StructField { | 188 | pub struct StructField { |
182 | parent: VariantDef, | 189 | pub(crate) parent: VariantDef, |
183 | pub(crate) id: StructFieldId, | 190 | pub(crate) id: StructFieldId, |
184 | } | 191 | } |
185 | 192 | ||
193 | #[derive(Debug)] | ||
194 | pub enum FieldSource { | ||
195 | Named(TreeArc<ast::NamedFieldDef>), | ||
196 | Pos(TreeArc<ast::PosFieldDef>), | ||
197 | } | ||
198 | |||
186 | impl StructField { | 199 | impl StructField { |
187 | pub fn name(&self, db: &impl HirDatabase) -> Name { | 200 | pub fn name(&self, db: &impl HirDatabase) -> Name { |
188 | self.parent.variant_data(db).fields().unwrap()[self.id] | 201 | self.parent.variant_data(db).fields().unwrap()[self.id] |
@@ -190,6 +203,10 @@ impl StructField { | |||
190 | .clone() | 203 | .clone() |
191 | } | 204 | } |
192 | 205 | ||
206 | pub fn source(&self, db: &impl HirDatabase) -> (HirFileId, FieldSource) { | ||
207 | self.source_impl(db) | ||
208 | } | ||
209 | |||
193 | pub fn ty(&self, db: &impl HirDatabase) -> Ty { | 210 | pub fn ty(&self, db: &impl HirDatabase) -> Ty { |
194 | db.type_for_field(*self) | 211 | db.type_for_field(*self) |
195 | } | 212 | } |
@@ -199,6 +216,15 @@ impl StructField { | |||
199 | } | 216 | } |
200 | } | 217 | } |
201 | 218 | ||
219 | impl Docs for StructField { | ||
220 | fn docs(&self, db: &impl HirDatabase) -> Option<Documentation> { | ||
221 | match self.source(db).1 { | ||
222 | FieldSource::Named(named) => docs_from_ast(&*named), | ||
223 | FieldSource::Pos(..) => return None, | ||
224 | } | ||
225 | } | ||
226 | } | ||
227 | |||
202 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 228 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
203 | pub struct Struct { | 229 | pub struct Struct { |
204 | pub(crate) id: StructId, | 230 | pub(crate) id: StructId, |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index 3c82262a2..5df4bd4a1 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -1,7 +1,7 @@
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use ra_syntax::{SyntaxNode, TreeArc, SourceFile}; | 3 | use ra_syntax::{SyntaxNode, TreeArc, SourceFile}; |
4 | use ra_db::{SyntaxDatabase, CrateId, salsa}; | 4 | use ra_db::{SourceDatabase, CrateId, salsa}; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | MacroCallId, HirFileId, | 7 | MacroCallId, HirFileId, |
@@ -18,10 +18,10 @@ use crate::{ | |||
18 | generics::{GenericParams, GenericDef}, | 18 | generics::{GenericParams, GenericDef}, |
19 | }; | 19 | }; |
20 | 20 | ||
21 | #[salsa::query_group] | 21 | #[salsa::query_group(HirDatabaseStorage)] |
22 | pub trait HirDatabase: SyntaxDatabase + AsRef<HirInterner> { | 22 | pub trait HirDatabase: SourceDatabase + AsRef<HirInterner> { |
23 | #[salsa::invoke(HirFileId::hir_source_file)] | 23 | #[salsa::invoke(HirFileId::hir_parse)] |
24 | fn hir_source_file(&self, file_id: HirFileId) -> TreeArc<SourceFile>; | 24 | fn hir_parse(&self, file_id: HirFileId) -> TreeArc<SourceFile>; |
25 | 25 | ||
26 | #[salsa::invoke(crate::macros::expand_macro_invocation)] | 26 | #[salsa::invoke(crate::macros::expand_macro_invocation)] |
27 | fn expand_macro_invocation(&self, invoc: MacroCallId) -> Option<Arc<MacroExpansion>>; | 27 | fn expand_macro_invocation(&self, invoc: MacroCallId) -> Option<Arc<MacroExpansion>>; |
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 5272656ec..7dd4b540e 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -86,12 +86,9 @@ impl HirFileId {
86 | } | 86 | } |
87 | } | 87 | } |
88 | 88 | ||
89 | pub(crate) fn hir_source_file( | 89 | pub(crate) fn hir_parse(db: &impl HirDatabase, file_id: HirFileId) -> TreeArc<SourceFile> { |
90 | db: &impl HirDatabase, | ||
91 | file_id: HirFileId, | ||
92 | ) -> TreeArc<SourceFile> { | ||
93 | match file_id.0 { | 90 | match file_id.0 { |
94 | HirFileIdRepr::File(file_id) => db.source_file(file_id), | 91 | HirFileIdRepr::File(file_id) => db.parse(file_id), |
95 | HirFileIdRepr::Macro(m) => { | 92 | HirFileIdRepr::Macro(m) => { |
96 | if let Some(exp) = db.expand_macro_invocation(m) { | 93 | if let Some(exp) = db.expand_macro_invocation(m) { |
97 | return exp.file(); | 94 | return exp.file(); |
@@ -370,7 +367,7 @@ impl SourceFileItems { | |||
370 | self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(), | 367 | self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(), |
371 | ); | 368 | ); |
372 | } | 369 | } |
373 | pub fn id_of_source_file(&self) -> SourceFileItemId { | 370 | pub fn id_of_parse(&self) -> SourceFileItemId { |
374 | let (id, _syntax) = self.arena.iter().next().unwrap(); | 371 | let (id, _syntax) = self.arena.iter().next().unwrap(); |
375 | id | 372 | id |
376 | } | 373 | } |
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 596f9c38c..eaf8565ee 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -68,7 +68,7 @@ pub use self::code_model_api::{
68 | Module, ModuleDef, ModuleSource, Problem, | 68 | Module, ModuleDef, ModuleSource, Problem, |
69 | Struct, Enum, EnumVariant, | 69 | Struct, Enum, EnumVariant, |
70 | Function, FnSignature, ScopeEntryWithSyntax, | 70 | Function, FnSignature, ScopeEntryWithSyntax, |
71 | StructField, | 71 | StructField, FieldSource, |
72 | Static, Const, | 72 | Static, Const, |
73 | Trait, Type, | 73 | Trait, Type, |
74 | }; | 74 | }; |
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
index 36b174cd6..7da15eca0 100644
--- a/crates/ra_hir/src/mock.rs
+++ b/crates/ra_hir/src/mock.rs
@@ -2,7 +2,7 @@ use std::{sync::Arc, panic};
2 | 2 | ||
3 | use parking_lot::Mutex; | 3 | use parking_lot::Mutex; |
4 | use ra_db::{ | 4 | use ra_db::{ |
5 | BaseDatabase, FilePosition, FileId, CrateGraph, SourceRoot, SourceRootId, FilesDatabase, | 5 | CheckCanceled, FilePosition, FileId, CrateGraph, SourceRoot, SourceRootId, SourceDatabase, salsa, |
6 | }; | 6 | }; |
7 | use relative_path::RelativePathBuf; | 7 | use relative_path::RelativePathBuf; |
8 | use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset}; | 8 | use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset}; |
@@ -11,7 +11,7 @@ use crate::{db, HirInterner}; | |||
11 | 11 | ||
12 | pub const WORKSPACE: SourceRootId = SourceRootId(0); | 12 | pub const WORKSPACE: SourceRootId = SourceRootId(0); |
13 | 13 | ||
14 | #[salsa::database(ra_db::FilesDatabase, ra_db::SyntaxDatabase, db::HirDatabase)] | 14 | #[salsa::database(ra_db::SourceDatabaseStorage, db::HirDatabaseStorage)] |
15 | #[derive(Debug)] | 15 | #[derive(Debug)] |
16 | pub(crate) struct MockDatabase { | 16 | pub(crate) struct MockDatabase { |
17 | events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>, | 17 | events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>, |
@@ -140,8 +140,6 @@ impl Default for MockDatabase { | |||
140 | file_counter: 0, | 140 | file_counter: 0, |
141 | }; | 141 | }; |
142 | db.set_crate_graph(Default::default()); | 142 | db.set_crate_graph(Default::default()); |
143 | db.set_local_roots(Default::default()); | ||
144 | db.set_library_roots(Default::default()); | ||
145 | db | 143 | db |
146 | } | 144 | } |
147 | } | 145 | } |
@@ -157,7 +155,7 @@ impl salsa::ParallelDatabase for MockDatabase { | |||
157 | } | 155 | } |
158 | } | 156 | } |
159 | 157 | ||
160 | impl BaseDatabase for MockDatabase {} | 158 | impl CheckCanceled for MockDatabase {} |
161 | 159 | ||
162 | impl AsRef<HirInterner> for MockDatabase { | 160 | impl AsRef<HirInterner> for MockDatabase { |
163 | fn as_ref(&self) -> &HirInterner { | 161 | fn as_ref(&self) -> &HirInterner { |
diff --git a/crates/ra_hir/src/module_tree.rs b/crates/ra_hir/src/module_tree.rs
index c00834c4c..d5ad9decb 100644
--- a/crates/ra_hir/src/module_tree.rs
+++ b/crates/ra_hir/src/module_tree.rs
@@ -1,6 +1,5 @@
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
4 | use arrayvec::ArrayVec; | 3 | use arrayvec::ArrayVec; |
5 | use relative_path::RelativePathBuf; | 4 | use relative_path::RelativePathBuf; |
6 | use ra_db::{FileId, SourceRoot, CrateId}; | 5 | use ra_db::{FileId, SourceRoot, CrateId}; |
@@ -147,28 +146,21 @@ impl ModuleTree { | |||
147 | let file_id = crate_graph.crate_root(crate_id); | 146 | let file_id = crate_graph.crate_root(crate_id); |
148 | let source_root_id = db.file_source_root(file_id); | 147 | let source_root_id = db.file_source_root(file_id); |
149 | 148 | ||
150 | let mut roots = FxHashMap::default(); | ||
151 | let mut visited = FxHashSet::default(); | ||
152 | |||
153 | let source_root = db.source_root(source_root_id); | 149 | let source_root = db.source_root(source_root_id); |
154 | let source = SourceItemId { | 150 | let source = SourceItemId { |
155 | file_id: file_id.into(), | 151 | file_id: file_id.into(), |
156 | item_id: None, | 152 | item_id: None, |
157 | }; | 153 | }; |
158 | let module_id = self.init_subtree(db, &source_root, &mut visited, &mut roots, None, source); | 154 | self.init_subtree(db, &source_root, None, source); |
159 | roots.insert(file_id, module_id); | ||
160 | } | 155 | } |
161 | 156 | ||
162 | fn init_subtree( | 157 | fn init_subtree( |
163 | &mut self, | 158 | &mut self, |
164 | db: &impl HirDatabase, | 159 | db: &impl HirDatabase, |
165 | source_root: &SourceRoot, | 160 | source_root: &SourceRoot, |
166 | visited: &mut FxHashSet<SourceItemId>, | ||
167 | roots: &mut FxHashMap<FileId, ModuleId>, | ||
168 | parent: Option<LinkId>, | 161 | parent: Option<LinkId>, |
169 | source: SourceItemId, | 162 | source: SourceItemId, |
170 | ) -> ModuleId { | 163 | ) -> ModuleId { |
171 | visited.insert(source); | ||
172 | let id = self.alloc_mod(ModuleData { | 164 | let id = self.alloc_mod(ModuleData { |
173 | source, | 165 | source, |
174 | parent, | 166 | parent, |
@@ -187,28 +179,21 @@ impl ModuleTree { | |||
187 | let (points_to, problem) = resolve_submodule(db, source.file_id, &sub.name); | 179 | let (points_to, problem) = resolve_submodule(db, source.file_id, &sub.name); |
188 | let points_to = points_to | 180 | let points_to = points_to |
189 | .into_iter() | 181 | .into_iter() |
190 | .map(|file_id| match roots.remove(&file_id) { | 182 | .map(|file_id| { |
191 | Some(module_id) => { | 183 | self.init_subtree( |
192 | self.mods[module_id].parent = Some(link); | ||
193 | module_id | ||
194 | } | ||
195 | None => self.init_subtree( | ||
196 | db, | 184 | db, |
197 | source_root, | 185 | source_root, |
198 | visited, | ||
199 | roots, | ||
200 | Some(link), | 186 | Some(link), |
201 | SourceItemId { | 187 | SourceItemId { |
202 | file_id: file_id.into(), | 188 | file_id: file_id.into(), |
203 | item_id: None, | 189 | item_id: None, |
204 | }, | 190 | }, |
205 | ), | 191 | ) |
206 | }) | 192 | }) |
207 | .collect::<Vec<_>>(); | 193 | .collect::<Vec<_>>(); |
208 | (points_to, problem) | 194 | (points_to, problem) |
209 | } else { | 195 | } else { |
210 | let points_to = | 196 | let points_to = self.init_subtree(db, source_root, Some(link), sub.source); |
211 | self.init_subtree(db, source_root, visited, roots, Some(link), sub.source); | ||
212 | (vec![points_to], None) | 197 | (vec![points_to], None) |
213 | }; | 198 | }; |
214 | 199 | ||
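The module_tree.rs change drops the visited/roots bookkeeping and simply recurses into each submodule, allocating a node per source and passing the parent link down. A self-contained toy of that "allocate, then recurse into children" shape (stand-in types, not the real ModuleTree):

```rust
// Toy version of init_subtree after this change: allocate the node first,
// then recurse into each child with the parent link. Types are stand-ins.
struct Source {
    name: String,
    children: Vec<Source>,
}

#[derive(Debug)]
struct Node {
    name: String,
    parent: Option<usize>,
}

#[derive(Default, Debug)]
struct Tree {
    nodes: Vec<Node>,
}

impl Tree {
    fn init_subtree(&mut self, source: &Source, parent: Option<usize>) -> usize {
        let id = self.nodes.len();
        self.nodes.push(Node {
            name: source.name.clone(),
            parent,
        });
        for child in &source.children {
            self.init_subtree(child, Some(id));
        }
        id
    }
}

fn main() {
    let root = Source {
        name: "crate".into(),
        children: vec![Source { name: "foo".into(), children: vec![] }],
    };
    let mut tree = Tree::default();
    tree.init_subtree(&root, None);
    println!("{:?}", tree.nodes);
}
```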
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs
index 028c1882f..5193900e0 100644
--- a/crates/ra_hir/src/nameres.rs
+++ b/crates/ra_hir/src/nameres.rs
@@ -190,7 +190,12 @@ where
190 | self.populate_module(module_id, Arc::clone(items)); | 190 | self.populate_module(module_id, Arc::clone(items)); |
191 | } | 191 | } |
192 | 192 | ||
193 | let mut iter = 0; | ||
193 | loop { | 194 | loop { |
195 | iter += 1; | ||
196 | if iter > 1000 { | ||
197 | panic!("failed to reach fixedpoint after 1000 iters") | ||
198 | } | ||
194 | let processed_imports_count = self.processed_imports.len(); | 199 | let processed_imports_count = self.processed_imports.len(); |
195 | for &module_id in self.input.keys() { | 200 | for &module_id in self.input.keys() { |
196 | self.db.check_canceled(); | 201 | self.db.check_canceled(); |
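The nameres.rs hunk adds a safety valve to the import-resolution fixpoint loop: the loop still ends when no new imports get processed in a round, but if it somehow never converges it now panics after 1000 iterations instead of hanging. A self-contained sketch of that bounded-fixpoint shape (the work closure here is a made-up stand-in for one resolution pass):

```rust
// Bounded fixpoint loop in the style of the nameres change: iterate until the
// progress measure stops growing, but bail out loudly after a fixed number of
// rounds rather than looping forever.
fn run_to_fixpoint(mut state: Vec<u32>, step: impl Fn(&mut Vec<u32>)) -> Vec<u32> {
    let mut iter = 0;
    loop {
        iter += 1;
        if iter > 1000 {
            panic!("failed to reach fixedpoint after 1000 iters");
        }
        let before = state.len();
        step(&mut state);
        if state.len() == before {
            // No new work was discovered in this round: we are done.
            break;
        }
    }
    state
}

fn main() {
    // Each pass "discovers" one more number below 5, then stops producing new ones.
    let result = run_to_fixpoint(vec![0], |s| {
        let next = s.last().copied().unwrap_or(0) + 1;
        if next < 5 {
            s.push(next);
        }
    });
    println!("{:?}", result); // [0, 1, 2, 3, 4]
}
```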
diff --git a/crates/ra_hir/src/nameres/lower.rs b/crates/ra_hir/src/nameres/lower.rs
index b4fe99ea7..1d77548f3 100644
--- a/crates/ra_hir/src/nameres/lower.rs
+++ b/crates/ra_hir/src/nameres/lower.rs
@@ -129,7 +129,7 @@ impl LoweredModule {
129 | let id = loc.id(db); | 129 | let id = loc.id(db); |
130 | let file_id = HirFileId::from(id); | 130 | let file_id = HirFileId::from(id); |
131 | //FIXME: expand recursively | 131 | //FIXME: expand recursively |
132 | for item in db.hir_source_file(file_id).items() { | 132 | for item in db.hir_parse(file_id).items() { |
133 | self.add_def_id(source_map, db, module, file_id, item); | 133 | self.add_def_id(source_map, db, module, file_id, item); |
134 | } | 134 | } |
135 | } | 135 | } |
diff --git a/crates/ra_hir/src/nameres/tests.rs b/crates/ra_hir/src/nameres/tests.rs
index 24936976c..e72781f51 100644
--- a/crates/ra_hir/src/nameres/tests.rs
+++ b/crates/ra_hir/src/nameres/tests.rs
@@ -1,6 +1,6 @@
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use ra_db::{CrateGraph, SourceRootId, FilesDatabase}; | 3 | use ra_db::{CrateGraph, SourceRootId, SourceDatabase}; |
4 | use relative_path::RelativePath; | 4 | use relative_path::RelativePath; |
5 | use test_utils::{assert_eq_text, covers}; | 5 | use test_utils::{assert_eq_text, covers}; |
6 | 6 | ||
diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs
index cf8c7e435..61c93a964 100644
--- a/crates/ra_hir/src/query_definitions.rs
+++ b/crates/ra_hir/src/query_definitions.rs
@@ -23,7 +23,7 @@ pub(super) fn fn_scopes(db: &impl HirDatabase, func: Function) -> Arc<FnScopes>
23 | } | 23 | } |
24 | 24 | ||
25 | pub(super) fn file_items(db: &impl HirDatabase, file_id: HirFileId) -> Arc<SourceFileItems> { | 25 | pub(super) fn file_items(db: &impl HirDatabase, file_id: HirFileId) -> Arc<SourceFileItems> { |
26 | let source_file = db.hir_source_file(file_id); | 26 | let source_file = db.hir_parse(file_id); |
27 | let res = SourceFileItems::new(file_id, &source_file); | 27 | let res = SourceFileItems::new(file_id, &source_file); |
28 | Arc::new(res) | 28 | Arc::new(res) |
29 | } | 29 | } |
@@ -34,10 +34,7 @@ pub(super) fn file_item( | |||
34 | ) -> TreeArc<SyntaxNode> { | 34 | ) -> TreeArc<SyntaxNode> { |
35 | match source_item_id.item_id { | 35 | match source_item_id.item_id { |
36 | Some(id) => db.file_items(source_item_id.file_id)[id].to_owned(), | 36 | Some(id) => db.file_items(source_item_id.file_id)[id].to_owned(), |
37 | None => db | 37 | None => db.hir_parse(source_item_id.file_id).syntax().to_owned(), |
38 | .hir_source_file(source_item_id.file_id) | ||
39 | .syntax() | ||
40 | .to_owned(), | ||
41 | } | 38 | } |
42 | } | 39 | } |
43 | 40 | ||
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index dbe040805..c0b3f1cd4 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -43,7 +43,7 @@ pub fn module_from_declaration(
43 | 43 | ||
44 | /// Locates the module by position in the source code. | 44 | /// Locates the module by position in the source code. |
45 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { | 45 | pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> { |
46 | let file = db.source_file(position.file_id); | 46 | let file = db.parse(position.file_id); |
47 | match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) { | 47 | match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) { |
48 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id.into(), m), | 48 | Some(m) if !m.has_semi() => module_from_inline(db, position.file_id.into(), m), |
49 | _ => module_from_file_id(db, position.file_id.into()), | 49 | _ => module_from_file_id(db, position.file_id.into()), |
@@ -95,7 +95,7 @@ fn module_from_source(db: &impl HirDatabase, source: SourceItemId) -> Option<Mod | |||
95 | } | 95 | } |
96 | 96 | ||
97 | pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Function> { | 97 | pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Function> { |
98 | let file = db.source_file(position.file_id); | 98 | let file = db.parse(position.file_id); |
99 | let fn_def = find_node_at_offset::<ast::FnDef>(file.syntax(), position.offset)?; | 99 | let fn_def = find_node_at_offset::<ast::FnDef>(file.syntax(), position.offset)?; |
100 | function_from_source(db, position.file_id, fn_def) | 100 | function_from_source(db, position.file_id, fn_def) |
101 | } | 101 | } |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index c57e222dd..179ebddee 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -38,7 +38,7 @@ use crate::{
38 | expr::{Body, Expr, BindingAnnotation, Literal, ExprId, Pat, PatId, UnaryOp, BinaryOp, Statement, FieldPat}, | 38 | expr::{Body, Expr, BindingAnnotation, Literal, ExprId, Pat, PatId, UnaryOp, BinaryOp, Statement, FieldPat}, |
39 | generics::GenericParams, | 39 | generics::GenericParams, |
40 | path::GenericArg, | 40 | path::GenericArg, |
41 | adt::VariantData, | 41 | adt::VariantDef, |
42 | }; | 42 | }; |
43 | 43 | ||
44 | /// The ID of a type variable. | 44 | /// The ID of a type variable. |
@@ -696,28 +696,6 @@ pub(super) fn type_for_def(db: &impl HirDatabase, def: TypableDef) -> Ty { | |||
696 | } | 696 | } |
697 | } | 697 | } |
698 | 698 | ||
699 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] | ||
700 | pub enum VariantDef { | ||
701 | Struct(Struct), | ||
702 | EnumVariant(EnumVariant), | ||
703 | } | ||
704 | impl_froms!(VariantDef: Struct, EnumVariant); | ||
705 | |||
706 | impl VariantDef { | ||
707 | pub(crate) fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> { | ||
708 | match self { | ||
709 | VariantDef::Struct(it) => it.field(db, name), | ||
710 | VariantDef::EnumVariant(it) => it.field(db, name), | ||
711 | } | ||
712 | } | ||
713 | pub(crate) fn variant_data(self, db: &impl HirDatabase) -> Arc<VariantData> { | ||
714 | match self { | ||
715 | VariantDef::Struct(it) => it.variant_data(db), | ||
716 | VariantDef::EnumVariant(it) => it.variant_data(db), | ||
717 | } | ||
718 | } | ||
719 | } | ||
720 | |||
721 | pub(super) fn type_for_field(db: &impl HirDatabase, field: StructField) -> Ty { | 699 | pub(super) fn type_for_field(db: &impl HirDatabase, field: StructField) -> Ty { |
722 | let parent_def = field.parent_def(db); | 700 | let parent_def = field.parent_def(db); |
723 | let (generics, module) = match parent_def { | 701 | let (generics, module) = match parent_def { |
@@ -732,8 +710,10 @@ pub(super) fn type_for_field(db: &impl HirDatabase, field: StructField) -> Ty { | |||
732 | /// The result of type inference: A mapping from expressions and patterns to types. | 710 | /// The result of type inference: A mapping from expressions and patterns to types. |
733 | #[derive(Clone, PartialEq, Eq, Debug)] | 711 | #[derive(Clone, PartialEq, Eq, Debug)] |
734 | pub struct InferenceResult { | 712 | pub struct InferenceResult { |
735 | /// For each method call expr, record the function it resolved to. | 713 | /// For each method call expr, records the function it resolves to. |
736 | method_resolutions: FxHashMap<ExprId, Function>, | 714 | method_resolutions: FxHashMap<ExprId, Function>, |
715 | /// For each field access expr, records the field it resolves to. | ||
716 | field_resolutions: FxHashMap<ExprId, StructField>, | ||
737 | type_of_expr: ArenaMap<ExprId, Ty>, | 717 | type_of_expr: ArenaMap<ExprId, Ty>, |
738 | type_of_pat: ArenaMap<PatId, Ty>, | 718 | type_of_pat: ArenaMap<PatId, Ty>, |
739 | } | 719 | } |
@@ -742,6 +722,9 @@ impl InferenceResult { | |||
742 | pub fn method_resolution(&self, expr: ExprId) -> Option<Function> { | 722 | pub fn method_resolution(&self, expr: ExprId) -> Option<Function> { |
743 | self.method_resolutions.get(&expr).map(|it| *it) | 723 | self.method_resolutions.get(&expr).map(|it| *it) |
744 | } | 724 | } |
725 | pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> { | ||
726 | self.field_resolutions.get(&expr).map(|it| *it) | ||
727 | } | ||
745 | } | 728 | } |
746 | 729 | ||
747 | impl Index<ExprId> for InferenceResult { | 730 | impl Index<ExprId> for InferenceResult { |
@@ -770,6 +753,7 @@ struct InferenceContext<'a, D: HirDatabase> { | |||
770 | impl_block: Option<ImplBlock>, | 753 | impl_block: Option<ImplBlock>, |
771 | var_unification_table: InPlaceUnificationTable<TypeVarId>, | 754 | var_unification_table: InPlaceUnificationTable<TypeVarId>, |
772 | method_resolutions: FxHashMap<ExprId, Function>, | 755 | method_resolutions: FxHashMap<ExprId, Function>, |
756 | field_resolutions: FxHashMap<ExprId, StructField>, | ||
773 | type_of_expr: ArenaMap<ExprId, Ty>, | 757 | type_of_expr: ArenaMap<ExprId, Ty>, |
774 | type_of_pat: ArenaMap<PatId, Ty>, | 758 | type_of_pat: ArenaMap<PatId, Ty>, |
775 | /// The return type of the function being inferred. | 759 | /// The return type of the function being inferred. |
@@ -806,7 +790,10 @@ fn binary_op_return_ty(op: BinaryOp, rhs_ty: Ty) -> Ty { | |||
806 | | BinaryOp::BitwiseAnd | 790 | | BinaryOp::BitwiseAnd |
807 | | BinaryOp::BitwiseOr | 791 | | BinaryOp::BitwiseOr |
808 | | BinaryOp::BitwiseXor => match rhs_ty { | 792 | | BinaryOp::BitwiseXor => match rhs_ty { |
809 | Ty::Int(..) | Ty::Float(..) => rhs_ty, | 793 | Ty::Int(..) |
794 | | Ty::Float(..) | ||
795 | | Ty::Infer(InferTy::IntVar(..)) | ||
796 | | Ty::Infer(InferTy::FloatVar(..)) => rhs_ty, | ||
810 | _ => Ty::Unknown, | 797 | _ => Ty::Unknown, |
811 | }, | 798 | }, |
812 | BinaryOp::RangeRightOpen | BinaryOp::RangeRightClosed => Ty::Unknown, | 799 | BinaryOp::RangeRightOpen | BinaryOp::RangeRightClosed => Ty::Unknown, |
@@ -861,6 +848,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
861 | ) -> Self { | 848 | ) -> Self { |
862 | InferenceContext { | 849 | InferenceContext { |
863 | method_resolutions: FxHashMap::default(), | 850 | method_resolutions: FxHashMap::default(), |
851 | field_resolutions: FxHashMap::default(), | ||
864 | type_of_expr: ArenaMap::default(), | 852 | type_of_expr: ArenaMap::default(), |
865 | type_of_pat: ArenaMap::default(), | 853 | type_of_pat: ArenaMap::default(), |
866 | var_unification_table: InPlaceUnificationTable::new(), | 854 | var_unification_table: InPlaceUnificationTable::new(), |
@@ -886,6 +874,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
886 | } | 874 | } |
887 | InferenceResult { | 875 | InferenceResult { |
888 | method_resolutions: mem::replace(&mut self.method_resolutions, Default::default()), | 876 | method_resolutions: mem::replace(&mut self.method_resolutions, Default::default()), |
877 | field_resolutions: mem::replace(&mut self.field_resolutions, Default::default()), | ||
889 | type_of_expr: expr_types, | 878 | type_of_expr: expr_types, |
890 | type_of_pat: pat_types, | 879 | type_of_pat: pat_types, |
891 | } | 880 | } |
@@ -899,6 +888,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
899 | self.method_resolutions.insert(expr, func); | 888 | self.method_resolutions.insert(expr, func); |
900 | } | 889 | } |
901 | 890 | ||
891 | fn write_field_resolution(&mut self, expr: ExprId, field: StructField) { | ||
892 | self.field_resolutions.insert(expr, field); | ||
893 | } | ||
894 | |||
902 | fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { | 895 | fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { |
903 | self.type_of_pat.insert(pat, ty); | 896 | self.type_of_pat.insert(pat, ty); |
904 | } | 897 | } |
@@ -1251,9 +1244,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1251 | ty | 1244 | ty |
1252 | } | 1245 | } |
1253 | 1246 | ||
1254 | fn infer_expr(&mut self, expr: ExprId, expected: &Expectation) -> Ty { | 1247 | fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { |
1255 | let body = Arc::clone(&self.body); // avoid borrow checker problem | 1248 | let body = Arc::clone(&self.body); // avoid borrow checker problem |
1256 | let ty = match &body[expr] { | 1249 | let ty = match &body[tgt_expr] { |
1257 | Expr::Missing => Ty::Unknown, | 1250 | Expr::Missing => Ty::Unknown, |
1258 | Expr::If { | 1251 | Expr::If { |
1259 | condition, | 1252 | condition, |
@@ -1344,7 +1337,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1344 | let resolved = receiver_ty.clone().lookup_method(self.db, method_name); | 1337 | let resolved = receiver_ty.clone().lookup_method(self.db, method_name); |
1345 | let method_ty = match resolved { | 1338 | let method_ty = match resolved { |
1346 | Some(func) => { | 1339 | Some(func) => { |
1347 | self.write_method_resolution(expr, func); | 1340 | self.write_method_resolution(tgt_expr, func); |
1348 | self.db.type_for_def(func.into()) | 1341 | self.db.type_for_def(func.into()) |
1349 | } | 1342 | } |
1350 | None => Ty::Unknown, | 1343 | None => Ty::Unknown, |
@@ -1389,7 +1382,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1389 | 1382 | ||
1390 | expected.ty | 1383 | expected.ty |
1391 | } | 1384 | } |
1392 | Expr::Path(p) => self.infer_path_expr(expr, p).unwrap_or(Ty::Unknown), | 1385 | Expr::Path(p) => self.infer_path_expr(tgt_expr, p).unwrap_or(Ty::Unknown), |
1393 | Expr::Continue => Ty::Never, | 1386 | Expr::Continue => Ty::Never, |
1394 | Expr::Break { expr } => { | 1387 | Expr::Break { expr } => { |
1395 | if let Some(expr) = expr { | 1388 | if let Some(expr) = expr { |
@@ -1436,9 +1429,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1436 | def_id: AdtDef::Struct(s), | 1429 | def_id: AdtDef::Struct(s), |
1437 | ref substs, | 1430 | ref substs, |
1438 | .. | 1431 | .. |
1439 | } => s | 1432 | } => s.field(self.db, name).map(|field| { |
1440 | .field(self.db, name) | 1433 | self.write_field_resolution(tgt_expr, field); |
1441 | .map(|field| field.ty(self.db).subst(substs)), | 1434 | field.ty(self.db).subst(substs) |
1435 | }), | ||
1442 | _ => None, | 1436 | _ => None, |
1443 | }) | 1437 | }) |
1444 | .unwrap_or(Ty::Unknown); | 1438 | .unwrap_or(Ty::Unknown); |
@@ -1545,7 +1539,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1545 | let ty = self.insert_type_vars_shallow(ty); | 1539 | let ty = self.insert_type_vars_shallow(ty); |
1546 | self.unify(&ty, &expected.ty); | 1540 | self.unify(&ty, &expected.ty); |
1547 | let ty = self.resolve_ty_as_possible(ty); | 1541 | let ty = self.resolve_ty_as_possible(ty); |
1548 | self.write_expr_ty(expr, ty.clone()); | 1542 | self.write_expr_ty(tgt_expr, ty.clone()); |
1549 | ty | 1543 | ty |
1550 | } | 1544 | } |
1551 | 1545 | ||
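Two things change in ty.rs: field accesses are now recorded in InferenceResult's field_resolutions alongside method resolutions, and binary_op_return_ty accepts not-yet-resolved integer/float type variables on the right-hand side, which is what lets `1 + y` infer to i32 in the bug_651 test below. A self-contained toy of the widened match (Ty/InferTy here are simplified stand-ins for the hir types, not the real definitions):

```rust
// Simplified stand-ins for hir's Ty/InferTy, just to show the widened match:
// an unresolved int/float variable on the RHS now flows through instead of
// collapsing the whole arithmetic expression to Unknown.
#[derive(Clone, Debug, PartialEq)]
enum InferTy {
    IntVar(u32),
    FloatVar(u32),
}

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Float,
    Infer(InferTy),
    Unknown,
}

fn arithmetic_return_ty(rhs_ty: Ty) -> Ty {
    match rhs_ty {
        Ty::Int
        | Ty::Float
        | Ty::Infer(InferTy::IntVar(..))
        | Ty::Infer(InferTy::FloatVar(..)) => rhs_ty,
        _ => Ty::Unknown,
    }
}

fn main() {
    // Before the fix, a not-yet-defaulted integer variable on the RHS fell
    // into the `_ => Unknown` arm.
    assert_eq!(
        arithmetic_return_ty(Ty::Infer(InferTy::IntVar(0))),
        Ty::Infer(InferTy::IntVar(0))
    );
    assert_eq!(arithmetic_return_ty(Ty::Unknown), Ty::Unknown);
}
```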
diff --git a/crates/ra_hir/src/ty/snapshots/tests__bug_651.snap b/crates/ra_hir/src/ty/snapshots/tests__bug_651.snap
new file mode 100644
index 000000000..d23d3f139
--- /dev/null
+++ b/crates/ra_hir/src/ty/snapshots/tests__bug_651.snap
@@ -0,0 +1,13 @@
1 | --- | ||
2 | created: "2019-01-25T20:31:47.275112244+00:00" | ||
3 | creator: [email protected] | ||
4 | expression: "&result" | ||
5 | source: crates/ra_hir/src/ty/tests.rs | ||
6 | --- | ||
7 | [11; 41) '{ ...+ y; }': () | ||
8 | [21; 22) 'y': i32 | ||
9 | [25; 27) '92': i32 | ||
10 | [33; 34) '1': i32 | ||
11 | [33; 38) '1 + y': i32 | ||
12 | [37; 38) 'y': i32 | ||
13 | |||
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index 92c74cf00..e0b0689f8 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -1,7 +1,7 @@
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | use std::fmt::Write; | 2 | use std::fmt::Write; |
3 | 3 | ||
4 | use ra_db::{SyntaxDatabase, salsa::Database}; | 4 | use ra_db::{SourceDatabase, salsa::Database}; |
5 | use ra_syntax::ast::{self, AstNode}; | 5 | use ra_syntax::ast::{self, AstNode}; |
6 | 6 | ||
7 | use crate::{ | 7 | use crate::{ |
@@ -532,9 +532,22 @@ fn test() { | |||
532 | ); | 532 | ); |
533 | } | 533 | } |
534 | 534 | ||
535 | #[test] | ||
536 | fn bug_651() { | ||
537 | check_inference( | ||
538 | "bug_651", | ||
539 | r#" | ||
540 | fn quux() { | ||
541 | let y = 92; | ||
542 | 1 + y; | ||
543 | } | ||
544 | "#, | ||
545 | ); | ||
546 | } | ||
547 | |||
535 | fn infer(content: &str) -> String { | 548 | fn infer(content: &str) -> String { |
536 | let (db, _, file_id) = MockDatabase::with_single_file(content); | 549 | let (db, _, file_id) = MockDatabase::with_single_file(content); |
537 | let source_file = db.source_file(file_id); | 550 | let source_file = db.parse(file_id); |
538 | let mut acc = String::new(); | 551 | let mut acc = String::new(); |
539 | for fn_def in source_file | 552 | for fn_def in source_file |
540 | .syntax() | 553 | .syntax() |
diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml
index 61942bbbb..79e473463 100644
--- a/crates/ra_ide_api/Cargo.toml
+++ b/crates/ra_ide_api/Cargo.toml
@@ -6,6 +6,7 @@ authors = ["Aleksey Kladov <[email protected]>"]
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | itertools = "0.8.0" | 8 | itertools = "0.8.0" |
9 | join_to_string = "0.1.3" | ||
9 | log = "0.4.5" | 10 | log = "0.4.5" |
10 | relative-path = "0.4.0" | 11 | relative-path = "0.4.0" |
11 | rayon = "1.0.2" | 12 | rayon = "1.0.2" |
@@ -13,7 +14,6 @@ fst = "0.3.1" | |||
13 | rustc-hash = "1.0" | 14 | rustc-hash = "1.0" |
14 | parking_lot = "0.7.0" | 15 | parking_lot = "0.7.0" |
15 | unicase = "2.2.0" | 16 | unicase = "2.2.0" |
16 | salsa = "0.10.0-alpha3" | ||
17 | 17 | ||
18 | ra_syntax = { path = "../ra_syntax" } | 18 | ra_syntax = { path = "../ra_syntax" } |
19 | ra_ide_api_light = { path = "../ra_ide_api_light" } | 19 | ra_ide_api_light = { path = "../ra_ide_api_light" } |
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
index 0449c1902..3267fff96 100644
--- a/crates/ra_ide_api/src/call_info.rs
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -1,4 +1,4 @@
1 | use ra_db::SyntaxDatabase; | 1 | use ra_db::SourceDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, SyntaxNode, TextUnit, TextRange, | 3 | AstNode, SyntaxNode, TextUnit, TextRange, |
4 | SyntaxKind::FN_DEF, | 4 | SyntaxKind::FN_DEF, |
@@ -10,7 +10,7 @@ use crate::{FilePosition, CallInfo, db::RootDatabase}; | |||
10 | 10 | ||
11 | /// Computes parameter information for the given call expression. | 11 | /// Computes parameter information for the given call expression. |
12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { | 12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { |
13 | let file = db.source_file(position.file_id); | 13 | let file = db.parse(position.file_id); |
14 | let syntax = file.syntax(); | 14 | let syntax = file.syntax(); |
15 | 15 | ||
16 | // Find the calling expression and it's NameRef | 16 | // Find the calling expression and it's NameRef |
@@ -22,7 +22,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal | |||
22 | let symbol = file_symbols | 22 | let symbol = file_symbols |
23 | .into_iter() | 23 | .into_iter() |
24 | .find(|it| it.ptr.kind() == FN_DEF)?; | 24 | .find(|it| it.ptr.kind() == FN_DEF)?; |
25 | let fn_file = db.source_file(symbol.file_id); | 25 | let fn_file = db.parse(symbol.file_id); |
26 | let fn_def = symbol.ptr.to_node(&fn_file); | 26 | let fn_def = symbol.ptr.to_node(&fn_file); |
27 | let fn_def = ast::FnDef::cast(fn_def).unwrap(); | 27 | let fn_def = ast::FnDef::cast(fn_def).unwrap(); |
28 | let mut call_info = CallInfo::new(fn_def)?; | 28 | let mut call_info = CallInfo::new(fn_def)?; |
diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs
index 565d57c37..b1867de42 100644
--- a/crates/ra_ide_api/src/completion.rs
+++ b/crates/ra_ide_api/src/completion.rs
@@ -9,7 +9,7 @@ mod complete_path;
9 | mod complete_scope; | 9 | mod complete_scope; |
10 | mod complete_postfix; | 10 | mod complete_postfix; |
11 | 11 | ||
12 | use ra_db::SyntaxDatabase; | 12 | use ra_db::SourceDatabase; |
13 | 13 | ||
14 | use crate::{ | 14 | use crate::{ |
15 | db, | 15 | db, |
@@ -45,7 +45,7 @@ pub use crate::completion::completion_item::{CompletionItem, CompletionItemKind, | |||
45 | /// identifier prefix/fuzzy match should be done higher in the stack, together | 45 | /// identifier prefix/fuzzy match should be done higher in the stack, together |
46 | /// with ordering of completions (currently this is done by the client). | 46 | /// with ordering of completions (currently this is done by the client). |
47 | pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> { | 47 | pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> { |
48 | let original_file = db.source_file(position.file_id); | 48 | let original_file = db.parse(position.file_id); |
49 | let ctx = CompletionContext::new(db, &original_file, position)?; | 49 | let ctx = CompletionContext::new(db, &original_file, position)?; |
50 | 50 | ||
51 | let mut acc = Completions::default(); | 51 | let mut acc = Completions::default(); |
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs
index 060a46c5e..bad51cc51 100644
--- a/crates/ra_ide_api/src/completion/complete_dot.rs
+++ b/crates/ra_ide_api/src/completion/complete_dot.rs
@@ -1,4 +1,4 @@
1 | use hir::{Ty, AdtDef}; | 1 | use hir::{Ty, AdtDef, Docs}; |
2 | 2 | ||
3 | use crate::completion::{CompletionContext, Completions, CompletionItem, CompletionItemKind}; | 3 | use crate::completion::{CompletionContext, Completions, CompletionItem, CompletionItemKind}; |
4 | use crate::completion::completion_item::CompletionKind; | 4 | use crate::completion::completion_item::CompletionKind; |
@@ -38,6 +38,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) | |||
38 | ) | 38 | ) |
39 | .kind(CompletionItemKind::Field) | 39 | .kind(CompletionItemKind::Field) |
40 | .detail(field.ty(ctx.db).subst(substs).to_string()) | 40 | .detail(field.ty(ctx.db).subst(substs).to_string()) |
41 | .set_documentation(field.docs(ctx.db)) | ||
41 | .add_to(acc); | 42 | .add_to(acc); |
42 | } | 43 | } |
43 | } | 44 | } |
@@ -107,7 +108,10 @@ mod tests { | |||
107 | check_ref_completion( | 108 | check_ref_completion( |
108 | "struct_field_completion_self", | 109 | "struct_field_completion_self", |
109 | r" | 110 | r" |
110 | struct A { the_field: (u32,) } | 111 | struct A { |
112 | /// This is the_field | ||
113 | the_field: (u32,) | ||
114 | } | ||
111 | impl A { | 115 | impl A { |
112 | fn foo(self) { | 116 | fn foo(self) { |
113 | self.<|> | 117 | self.<|> |
diff --git a/crates/ra_ide_api/src/completion/complete_path.rs b/crates/ra_ide_api/src/completion/complete_path.rs
index e3f1d42f8..b33ddcde5 100644
--- a/crates/ra_ide_api/src/completion/complete_path.rs
+++ b/crates/ra_ide_api/src/completion/complete_path.rs
@@ -1,3 +1,5 @@
1 | use join_to_string::join; | ||
2 | |||
1 | use crate::{ | 3 | use crate::{ |
2 | completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, | 4 | completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, |
3 | }; | 5 | }; |
@@ -29,6 +31,15 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { | |||
29 | hir::ModuleDef::Enum(e) => { | 31 | hir::ModuleDef::Enum(e) => { |
30 | e.variants(ctx.db).into_iter().for_each(|variant| { | 32 | e.variants(ctx.db).into_iter().for_each(|variant| { |
31 | if let Some(name) = variant.name(ctx.db) { | 33 | if let Some(name) = variant.name(ctx.db) { |
34 | let detail_types = variant | ||
35 | .fields(ctx.db) | ||
36 | .into_iter() | ||
37 | .map(|field| field.ty(ctx.db)); | ||
38 | let detail = join(detail_types) | ||
39 | .separator(", ") | ||
40 | .surround_with("(", ")") | ||
41 | .to_string(); | ||
42 | |||
32 | CompletionItem::new( | 43 | CompletionItem::new( |
33 | CompletionKind::Reference, | 44 | CompletionKind::Reference, |
34 | ctx.source_range(), | 45 | ctx.source_range(), |
@@ -36,6 +47,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { | |||
36 | ) | 47 | ) |
37 | .kind(CompletionItemKind::EnumVariant) | 48 | .kind(CompletionItemKind::EnumVariant) |
38 | .set_documentation(variant.docs(ctx.db)) | 49 | .set_documentation(variant.docs(ctx.db)) |
50 | .set_detail(Some(detail)) | ||
39 | .add_to(acc) | 51 | .add_to(acc) |
40 | } | 52 | } |
41 | }); | 53 | }); |
@@ -54,6 +66,22 @@ mod tests { | |||
54 | } | 66 | } |
55 | 67 | ||
56 | #[test] | 68 | #[test] |
69 | fn completes_mod_with_docs() { | ||
70 | check_reference_completion( | ||
71 | "mod_with_docs", | ||
72 | r" | ||
73 | use self::my<|>; | ||
74 | |||
75 | /// Some simple | ||
76 | /// docs describing `mod my`. | ||
77 | mod my { | ||
78 | struct Bar; | ||
79 | } | ||
80 | ", | ||
81 | ); | ||
82 | } | ||
83 | |||
84 | #[test] | ||
57 | fn completes_use_item_starting_with_self() { | 85 | fn completes_use_item_starting_with_self() { |
58 | check_reference_completion( | 86 | check_reference_completion( |
59 | "use_item_starting_with_self", | 87 | "use_item_starting_with_self", |
@@ -116,7 +144,7 @@ mod tests { | |||
116 | #[test] | 144 | #[test] |
117 | fn completes_enum_variant() { | 145 | fn completes_enum_variant() { |
118 | check_reference_completion( | 146 | check_reference_completion( |
119 | "reference_completion", | 147 | "enum_variant", |
120 | " | 148 | " |
121 | //- /lib.rs | 149 | //- /lib.rs |
122 | /// An enum | 150 | /// An enum |
@@ -130,4 +158,25 @@ mod tests { | |||
130 | ", | 158 | ", |
131 | ); | 159 | ); |
132 | } | 160 | } |
161 | |||
162 | #[test] | ||
163 | fn completes_enum_variant_with_details() { | ||
164 | check_reference_completion( | ||
165 | "enum_variant_with_details", | ||
166 | " | ||
167 | //- /lib.rs | ||
168 | struct S { field: u32 } | ||
169 | /// An enum | ||
170 | enum E { | ||
171 | /// Foo Variant (empty) | ||
172 | Foo, | ||
173 | /// Bar Variant with i32 and u32 | ||
174 | Bar(i32, u32), | ||
175 | /// | ||
176 | S(S), | ||
177 | } | ||
178 | fn foo() { let _ = E::<|> } | ||
179 | ", | ||
180 | ); | ||
181 | } | ||
133 | } | 182 | } |
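The enum-variant completion now builds a detail string such as "(i32, u32)" from the variant's field types using the join_to_string crate (added to ra_ide_api's Cargo.toml above). A small stand-alone sketch of that call chain, with plain strings in place of the hir field types and assuming join_to_string = "0.1.3" as in the manifest:

```rust
// Sketch of the detail-string construction from complete_path.rs above;
// plain &strs stand in for hir::StructField types.
use join_to_string::join;

fn variant_detail(field_types: &[&str]) -> String {
    join(field_types.iter())
        .separator(", ")
        .surround_with("(", ")")
        .to_string()
}

fn main() {
    assert_eq!(variant_detail(&["i32", "u32"]), "(i32, u32)");
    // A unit variant has no fields, which yields the "()" detail seen in the snapshots.
    assert_eq!(variant_detail(&[]), "()");
}
```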
diff --git a/crates/ra_ide_api/src/completion/completion_item.rs b/crates/ra_ide_api/src/completion/completion_item.rs
index b2b047766..b16ac2b28 100644
--- a/crates/ra_ide_api/src/completion/completion_item.rs
+++ b/crates/ra_ide_api/src/completion/completion_item.rs
@@ -215,7 +215,7 @@ impl Builder {
215 | Some(it) => it, | 215 | Some(it) => it, |
216 | }; | 216 | }; |
217 | let (kind, docs) = match def { | 217 | let (kind, docs) = match def { |
218 | hir::ModuleDef::Module(_) => (CompletionItemKind::Module, None), | 218 | hir::ModuleDef::Module(it) => (CompletionItemKind::Module, it.docs(ctx.db)), |
219 | hir::ModuleDef::Function(func) => return self.from_function(ctx, func), | 219 | hir::ModuleDef::Function(func) => return self.from_function(ctx, func), |
220 | hir::ModuleDef::Struct(it) => (CompletionItemKind::Struct, it.docs(ctx.db)), | 220 | hir::ModuleDef::Struct(it) => (CompletionItemKind::Struct, it.docs(ctx.db)), |
221 | hir::ModuleDef::Enum(it) => (CompletionItemKind::Enum, it.docs(ctx.db)), | 221 | hir::ModuleDef::Enum(it) => (CompletionItemKind::Enum, it.docs(ctx.db)), |
diff --git a/crates/ra_ide_api/src/completion/snapshots/completion_item__reference_completion.snap b/crates/ra_ide_api/src/completion/snapshots/completion_item__enum_variant.snap
index e46f7807b..8c84439b2 100644
--- a/crates/ra_ide_api/src/completion/snapshots/completion_item__reference_completion.snap
+++ b/crates/ra_ide_api/src/completion/snapshots/completion_item__enum_variant.snap
@@ -1,8 +1,8 @@
1 | --- | 1 | --- |
2 | created: "2019-01-23T23:49:43.278245900+00:00" | 2 | created: "2019-01-25T16:44:04.640545300+00:00" |
3 | creator: [email protected].1 | 3 | creator: [email protected].2 |
4 | expression: kind_completions | 4 | expression: kind_completions |
5 | source: "crates\\ra_ide_api\\src\\completion\\completion_item.rs" | 5 | source: crates/ra_ide_api/src/completion/completion_item.rs |
6 | --- | 6 | --- |
7 | [ | 7 | [ |
8 | CompletionItem { | 8 | CompletionItem { |
@@ -11,7 +11,9 @@ source: "crates\\ra_ide_api\\src\\completion\\completion_item.rs" | |||
11 | kind: Some( | 11 | kind: Some( |
12 | EnumVariant | 12 | EnumVariant |
13 | ), | 13 | ), |
14 | detail: None, | 14 | detail: Some( |
15 | "()" | ||
16 | ), | ||
15 | documentation: Some( | 17 | documentation: Some( |
16 | Documentation( | 18 | Documentation( |
17 | "Foo Variant" | 19 | "Foo Variant" |
@@ -29,7 +31,9 @@ source: "crates\\ra_ide_api\\src\\completion\\completion_item.rs" | |||
29 | kind: Some( | 31 | kind: Some( |
30 | EnumVariant | 32 | EnumVariant |
31 | ), | 33 | ), |
32 | detail: None, | 34 | detail: Some( |
35 | "(i32)" | ||
36 | ), | ||
33 | documentation: Some( | 37 | documentation: Some( |
34 | Documentation( | 38 | Documentation( |
35 | "Bar Variant with i32" | 39 | "Bar Variant with i32" |
diff --git a/crates/ra_ide_api/src/completion/snapshots/completion_item__enum_variant_with_details.snap b/crates/ra_ide_api/src/completion/snapshots/completion_item__enum_variant_with_details.snap
new file mode 100644
index 000000000..384634517
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/snapshots/completion_item__enum_variant_with_details.snap
@@ -0,0 +1,64 @@
1 | --- | ||
2 | created: "2019-01-25T16:44:04.641542400+00:00" | ||
3 | creator: [email protected] | ||
4 | expression: kind_completions | ||
5 | source: crates/ra_ide_api/src/completion/completion_item.rs | ||
6 | --- | ||
7 | [ | ||
8 | CompletionItem { | ||
9 | completion_kind: Reference, | ||
10 | label: "Foo", | ||
11 | kind: Some( | ||
12 | EnumVariant | ||
13 | ), | ||
14 | detail: Some( | ||
15 | "()" | ||
16 | ), | ||
17 | documentation: Some( | ||
18 | Documentation( | ||
19 | "Foo Variant (empty)" | ||
20 | ) | ||
21 | ), | ||
22 | lookup: None, | ||
23 | insert_text: None, | ||
24 | insert_text_format: PlainText, | ||
25 | source_range: [180; 180), | ||
26 | text_edit: None | ||
27 | }, | ||
28 | CompletionItem { | ||
29 | completion_kind: Reference, | ||
30 | label: "Bar", | ||
31 | kind: Some( | ||
32 | EnumVariant | ||
33 | ), | ||
34 | detail: Some( | ||
35 | "(i32, u32)" | ||
36 | ), | ||
37 | documentation: Some( | ||
38 | Documentation( | ||
39 | "Bar Variant with i32 and u32" | ||
40 | ) | ||
41 | ), | ||
42 | lookup: None, | ||
43 | insert_text: None, | ||
44 | insert_text_format: PlainText, | ||
45 | source_range: [180; 180), | ||
46 | text_edit: None | ||
47 | }, | ||
48 | CompletionItem { | ||
49 | completion_kind: Reference, | ||
50 | label: "S", | ||
51 | kind: Some( | ||
52 | EnumVariant | ||
53 | ), | ||
54 | detail: Some( | ||
55 | "(S)" | ||
56 | ), | ||
57 | documentation: None, | ||
58 | lookup: None, | ||
59 | insert_text: None, | ||
60 | insert_text_format: PlainText, | ||
61 | source_range: [180; 180), | ||
62 | text_edit: None | ||
63 | } | ||
64 | ] | ||
diff --git a/crates/ra_ide_api/src/completion/snapshots/completion_item__mod_with_docs.snap b/crates/ra_ide_api/src/completion/snapshots/completion_item__mod_with_docs.snap
new file mode 100644
index 000000000..3db7119a7
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/snapshots/completion_item__mod_with_docs.snap
@@ -0,0 +1,26 @@
1 | --- | ||
2 | created: "2019-01-25T17:49:28.949186500+00:00" | ||
3 | creator: [email protected] | ||
4 | expression: kind_completions | ||
5 | source: crates/ra_ide_api/src/completion/completion_item.rs | ||
6 | --- | ||
7 | [ | ||
8 | CompletionItem { | ||
9 | completion_kind: Reference, | ||
10 | label: "my", | ||
11 | kind: Some( | ||
12 | Module | ||
13 | ), | ||
14 | detail: None, | ||
15 | documentation: Some( | ||
16 | Documentation( | ||
17 | "Some simple\ndocs describing `mod my`." | ||
18 | ) | ||
19 | ), | ||
20 | lookup: None, | ||
21 | insert_text: None, | ||
22 | insert_text_format: PlainText, | ||
23 | source_range: [23; 25), | ||
24 | text_edit: None | ||
25 | } | ||
26 | ] | ||
diff --git a/crates/ra_ide_api/src/completion/snapshots/completion_item__struct_field_completion_self.snap b/crates/ra_ide_api/src/completion/snapshots/completion_item__struct_field_completion_self.snap
index 80e8f3df5..580e92a90 100644
--- a/crates/ra_ide_api/src/completion/snapshots/completion_item__struct_field_completion_self.snap
+++ b/crates/ra_ide_api/src/completion/snapshots/completion_item__struct_field_completion_self.snap
@@ -1,5 +1,5 @@
1 | --- | 1 | --- |
2 | created: "2019-01-23T13:19:23.501297515+00:00" | 2 | created: "2019-01-25T19:27:09.519688600+00:00" |
3 | creator: [email protected] | 3 | creator: [email protected] |
4 | expression: kind_completions | 4 | expression: kind_completions |
5 | source: crates/ra_ide_api/src/completion/completion_item.rs | 5 | source: crates/ra_ide_api/src/completion/completion_item.rs |
@@ -14,11 +14,15 @@ source: crates/ra_ide_api/src/completion/completion_item.rs | |||
14 | detail: Some( | 14 | detail: Some( |
15 | "(u32,)" | 15 | "(u32,)" |
16 | ), | 16 | ), |
17 | documentation: None, | 17 | documentation: Some( |
18 | Documentation( | ||
19 | "This is the_field" | ||
20 | ) | ||
21 | ), | ||
18 | lookup: None, | 22 | lookup: None, |
19 | insert_text: None, | 23 | insert_text: None, |
20 | insert_text_format: PlainText, | 24 | insert_text_format: PlainText, |
21 | source_range: [121; 121), | 25 | source_range: [187; 187), |
22 | text_edit: None | 26 | text_edit: None |
23 | }, | 27 | }, |
24 | CompletionItem { | 28 | CompletionItem { |
@@ -36,7 +40,7 @@ source: crates/ra_ide_api/src/completion/completion_item.rs | |||
36 | "foo()$0" | 40 | "foo()$0" |
37 | ), | 41 | ), |
38 | insert_text_format: Snippet, | 42 | insert_text_format: Snippet, |
39 | source_range: [121; 121), | 43 | source_range: [187; 187), |
40 | text_edit: None | 44 | text_edit: None |
41 | } | 45 | } |
42 | ] | 46 | ] |
diff --git a/crates/ra_ide_api/src/db.rs b/crates/ra_ide_api/src/db.rs index f0190ae51..3da93ec35 100644 --- a/crates/ra_ide_api/src/db.rs +++ b/crates/ra_ide_api/src/db.rs | |||
@@ -1,18 +1,17 @@ | |||
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use ra_db::{ | 3 | use ra_db::{ |
4 | BaseDatabase, FileId, Canceled, | 4 | CheckCanceled, FileId, Canceled, SourceDatabase, |
5 | salsa::{self, Database}, | 5 | salsa, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use crate::{symbol_index, LineIndex}; | 8 | use crate::{LineIndex, symbol_index::{self, SymbolsDatabase}}; |
9 | 9 | ||
10 | #[salsa::database( | 10 | #[salsa::database( |
11 | ra_db::FilesDatabase, | 11 | ra_db::SourceDatabaseStorage, |
12 | ra_db::SyntaxDatabase, | 12 | LineIndexDatabaseStorage, |
13 | LineIndexDatabase, | 13 | symbol_index::SymbolsDatabaseStorage, |
14 | symbol_index::SymbolsDatabase, | 14 | hir::db::HirDatabaseStorage |
15 | hir::db::HirDatabase | ||
16 | )] | 15 | )] |
17 | #[derive(Debug)] | 16 | #[derive(Debug)] |
18 | pub(crate) struct RootDatabase { | 17 | pub(crate) struct RootDatabase { |
@@ -35,12 +34,9 @@ impl Default for RootDatabase { | |||
35 | runtime: salsa::Runtime::default(), | 34 | runtime: salsa::Runtime::default(), |
36 | interner: Default::default(), | 35 | interner: Default::default(), |
37 | }; | 36 | }; |
38 | db.query_mut(ra_db::CrateGraphQuery) | 37 | db.set_crate_graph(Default::default()); |
39 | .set((), Default::default()); | 38 | db.set_local_roots(Default::default()); |
40 | db.query_mut(ra_db::LocalRootsQuery) | 39 | db.set_library_roots(Default::default()); |
41 | .set((), Default::default()); | ||
42 | db.query_mut(ra_db::LibraryRootsQuery) | ||
43 | .set((), Default::default()); | ||
44 | db | 40 | db |
45 | } | 41 | } |
46 | } | 42 | } |
@@ -54,7 +50,7 @@ impl salsa::ParallelDatabase for RootDatabase { | |||
54 | } | 50 | } |
55 | } | 51 | } |
56 | 52 | ||
57 | impl BaseDatabase for RootDatabase {} | 53 | impl CheckCanceled for RootDatabase {} |
58 | 54 | ||
59 | impl AsRef<hir::HirInterner> for RootDatabase { | 55 | impl AsRef<hir::HirInterner> for RootDatabase { |
60 | fn as_ref(&self) -> &hir::HirInterner { | 56 | fn as_ref(&self) -> &hir::HirInterner { |
@@ -62,12 +58,12 @@ impl AsRef<hir::HirInterner> for RootDatabase { | |||
62 | } | 58 | } |
63 | } | 59 | } |
64 | 60 | ||
65 | #[salsa::query_group] | 61 | #[salsa::query_group(LineIndexDatabaseStorage)] |
66 | pub(crate) trait LineIndexDatabase: ra_db::FilesDatabase + BaseDatabase { | 62 | pub(crate) trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { |
67 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; | 63 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; |
68 | } | 64 | } |
69 | 65 | ||
70 | fn line_index(db: &impl ra_db::FilesDatabase, file_id: FileId) -> Arc<LineIndex> { | 66 | fn line_index(db: &impl ra_db::SourceDatabase, file_id: FileId) -> Arc<LineIndex> { |
71 | let text = db.file_text(file_id); | 67 | let text = db.file_text(file_id); |
72 | Arc::new(LineIndex::new(&*text)) | 68 | Arc::new(LineIndex::new(&*text)) |
73 | } | 69 | } |
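
Note on the `db.rs` hunk above: with salsa 0.10.0-alpha4, a `#[salsa::query_group(...)]` names the storage struct it generates, the `#[salsa::database(...)]` attribute on the database type lists those `*Storage` types rather than the query traits, and every `#[salsa::input]` query gets a generated `set_*` setter, which is what replaces the old `query_mut(SomeQuery).set((), value)` calls in `Default for RootDatabase`. A minimal, self-contained sketch of that shape; `Inputs`, `source_text`, `source_len` and `DemoDatabase` are invented for illustration, and the `salsa::Database` plumbing is assumed to match what `RootDatabase` already does (a `runtime` field returned from `salsa_runtime`):

    use std::sync::Arc;

    // A query group now names its generated storage struct explicitly.
    #[salsa::query_group(InputsStorage)]
    pub trait Inputs: salsa::Database {
        // Input query: salsa generates a `set_source_text` setter for it.
        #[salsa::input]
        fn source_text(&self, key: u32) -> Arc<String>;
        // Derived query, recomputed only when the inputs it reads change.
        fn source_len(&self, key: u32) -> usize;
    }

    fn source_len(db: &impl Inputs, key: u32) -> usize {
        db.source_text(key).len()
    }

    // The database lists the `*Storage` types, not the traits themselves.
    #[salsa::database(InputsStorage)]
    #[derive(Default)]
    pub struct DemoDatabase {
        runtime: salsa::Runtime<DemoDatabase>,
    }

    impl salsa::Database for DemoDatabase {
        fn salsa_runtime(&self) -> &salsa::Runtime<DemoDatabase> {
            &self.runtime
        }
    }

    fn main() {
        let mut db = DemoDatabase::default();
        // Generated setter, replacing `db.query_mut(SourceTextQuery).set(..)`.
        db.set_source_text(1, Arc::new(String::from("hello")));
        assert_eq!(db.source_len(1), 5);
    }
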
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 718b4def5..cd2ebe471 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use ra_db::SyntaxDatabase; | 1 | use ra_db::SourceDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | SyntaxNode, AstNode, SourceFile, | 3 | SyntaxNode, AstNode, SourceFile, |
4 | ast, algo::find_covering_node, | 4 | ast, algo::find_covering_node, |
@@ -10,7 +10,7 @@ use crate::{ | |||
10 | }; | 10 | }; |
11 | 11 | ||
12 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | 12 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { |
13 | let source_file = db.source_file(frange.file_id); | 13 | let source_file = db.parse(frange.file_id); |
14 | if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { | 14 | if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { |
15 | return range; | 15 | return range; |
16 | } | 16 | } |
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs index 46bdde00d..2a20c20ee 100644 --- a/crates/ra_ide_api/src/goto_definition.rs +++ b/crates/ra_ide_api/src/goto_definition.rs | |||
@@ -1,8 +1,9 @@ | |||
1 | use ra_db::{FileId, SyntaxDatabase}; | 1 | use ra_db::{FileId, SourceDatabase}; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, ast, | 3 | AstNode, ast, |
4 | algo::find_node_at_offset, | 4 | algo::find_node_at_offset, |
5 | }; | 5 | }; |
6 | use test_utils::tested_by; | ||
6 | 7 | ||
7 | use crate::{FilePosition, NavigationTarget, db::RootDatabase, RangeInfo}; | 8 | use crate::{FilePosition, NavigationTarget, db::RootDatabase, RangeInfo}; |
8 | 9 | ||
@@ -10,7 +11,7 @@ pub(crate) fn goto_definition( | |||
10 | db: &RootDatabase, | 11 | db: &RootDatabase, |
11 | position: FilePosition, | 12 | position: FilePosition, |
12 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 13 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
13 | let file = db.source_file(position.file_id); | 14 | let file = db.parse(position.file_id); |
14 | let syntax = file.syntax(); | 15 | let syntax = file.syntax(); |
15 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { | 16 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { |
16 | let navs = reference_definition(db, position.file_id, name_ref).to_vec(); | 17 | let navs = reference_definition(db, position.file_id, name_ref).to_vec(); |
@@ -60,6 +61,7 @@ pub(crate) fn reference_definition( | |||
60 | .parent() | 61 | .parent() |
61 | .and_then(ast::MethodCallExpr::cast) | 62 | .and_then(ast::MethodCallExpr::cast) |
62 | { | 63 | { |
64 | tested_by!(goto_definition_works_for_methods); | ||
63 | let infer_result = function.infer(db); | 65 | let infer_result = function.infer(db); |
64 | let syntax_mapping = function.body_syntax_mapping(db); | 66 | let syntax_mapping = function.body_syntax_mapping(db); |
65 | let expr = ast::Expr::cast(method_call.syntax()).unwrap(); | 67 | let expr = ast::Expr::cast(method_call.syntax()).unwrap(); |
@@ -70,6 +72,19 @@ pub(crate) fn reference_definition( | |||
70 | return Exact(NavigationTarget::from_function(db, func)); | 72 | return Exact(NavigationTarget::from_function(db, func)); |
71 | }; | 73 | }; |
72 | } | 74 | } |
75 | // It could also be a field access | ||
76 | if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) { | ||
77 | tested_by!(goto_definition_works_for_fields); | ||
78 | let infer_result = function.infer(db); | ||
79 | let syntax_mapping = function.body_syntax_mapping(db); | ||
80 | let expr = ast::Expr::cast(field_expr.syntax()).unwrap(); | ||
81 | if let Some(field) = syntax_mapping | ||
82 | .node_expr(expr) | ||
83 | .and_then(|it| infer_result.field_resolution(it)) | ||
84 | { | ||
85 | return Exact(NavigationTarget::from_field(db, field)); | ||
86 | }; | ||
87 | } | ||
73 | } | 88 | } |
74 | // Then try module name resolution | 89 | // Then try module name resolution |
75 | if let Some(module) = hir::source_binder::module_from_child_node(db, file_id, name_ref.syntax()) | 90 | if let Some(module) = hir::source_binder::module_from_child_node(db, file_id, name_ref.syntax()) |
@@ -82,9 +97,7 @@ pub(crate) fn reference_definition( | |||
82 | { | 97 | { |
83 | let resolved = module.resolve_path(db, &path); | 98 | let resolved = module.resolve_path(db, &path); |
84 | if let Some(def_id) = resolved.take_types().or(resolved.take_values()) { | 99 | if let Some(def_id) = resolved.take_types().or(resolved.take_values()) { |
85 | if let Some(target) = NavigationTarget::from_def(db, def_id) { | 100 | return Exact(NavigationTarget::from_def(db, def_id)); |
86 | return Exact(target); | ||
87 | } | ||
88 | } | 101 | } |
89 | } | 102 | } |
90 | } | 103 | } |
@@ -117,6 +130,8 @@ fn name_definition( | |||
117 | 130 | ||
118 | #[cfg(test)] | 131 | #[cfg(test)] |
119 | mod tests { | 132 | mod tests { |
133 | use test_utils::covers; | ||
134 | |||
120 | use crate::mock_analysis::analysis_and_position; | 135 | use crate::mock_analysis::analysis_and_position; |
121 | 136 | ||
122 | fn check_goto(fixuture: &str, expected: &str) { | 137 | fn check_goto(fixuture: &str, expected: &str) { |
@@ -183,6 +198,7 @@ mod tests { | |||
183 | 198 | ||
184 | #[test] | 199 | #[test] |
185 | fn goto_definition_works_for_methods() { | 200 | fn goto_definition_works_for_methods() { |
201 | covers!(goto_definition_works_for_methods); | ||
186 | check_goto( | 202 | check_goto( |
187 | " | 203 | " |
188 | //- /lib.rs | 204 | //- /lib.rs |
@@ -197,15 +213,23 @@ mod tests { | |||
197 | ", | 213 | ", |
198 | "frobnicate FN_DEF FileId(1) [27; 52) [30; 40)", | 214 | "frobnicate FN_DEF FileId(1) [27; 52) [30; 40)", |
199 | ); | 215 | ); |
216 | } | ||
200 | 217 | ||
218 | #[test] | ||
219 | fn goto_definition_works_for_fields() { | ||
220 | covers!(goto_definition_works_for_fields); | ||
201 | check_goto( | 221 | check_goto( |
202 | " | 222 | " |
203 | //- /lib.rs | 223 | //- /lib.rs |
204 | mod <|>foo; | 224 | struct Foo { |
205 | //- /foo/mod.rs | 225 | spam: u32, |
206 | // empty | 226 | } |
227 | |||
228 | fn bar(foo: &Foo) { | ||
229 | foo.spam<|>; | ||
230 | } | ||
207 | ", | 231 | ", |
208 | "foo SOURCE_FILE FileId(2) [0; 10)", | 232 | "spam NAMED_FIELD_DEF FileId(1) [17; 26) [17; 21)", |
209 | ); | 233 | ); |
210 | } | 234 | } |
211 | } | 235 | } |
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs index 4d4bfbc4d..ff9ae2d9c 100644 --- a/crates/ra_ide_api/src/hover.rs +++ b/crates/ra_ide_api/src/hover.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use ra_db::{SyntaxDatabase}; | 1 | use ra_db::SourceDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, SyntaxNode, TreeArc, ast, | 3 | AstNode, SyntaxNode, TreeArc, ast, |
4 | algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, | 4 | algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, |
@@ -7,7 +7,7 @@ use ra_syntax::{ | |||
7 | use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget}; | 7 | use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget}; |
8 | 8 | ||
9 | pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<String>> { | 9 | pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<String>> { |
10 | let file = db.source_file(position.file_id); | 10 | let file = db.parse(position.file_id); |
11 | let mut res = Vec::new(); | 11 | let mut res = Vec::new(); |
12 | 12 | ||
13 | let mut range = None; | 13 | let mut range = None; |
@@ -53,7 +53,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
53 | } | 53 | } |
54 | 54 | ||
55 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { | 55 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { |
56 | let file = db.source_file(frange.file_id); | 56 | let file = db.parse(frange.file_id); |
57 | let syntax = file.syntax(); | 57 | let syntax = file.syntax(); |
58 | let leaf_node = find_covering_node(syntax, frange.range); | 58 | let leaf_node = find_covering_node(syntax, frange.range); |
59 | // if we picked identifier, expand to pattern/expression | 59 | // if we picked identifier, expand to pattern/expression |
@@ -88,7 +88,7 @@ fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Option<String> { | |||
88 | 88 | ||
89 | impl NavigationTarget { | 89 | impl NavigationTarget { |
90 | fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> { | 90 | fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> { |
91 | let source_file = db.source_file(self.file_id()); | 91 | let source_file = db.parse(self.file_id()); |
92 | let source_file = source_file.syntax(); | 92 | let source_file = source_file.syntax(); |
93 | let node = source_file | 93 | let node = source_file |
94 | .descendants() | 94 | .descendants() |
diff --git a/crates/ra_ide_api/src/imp.rs b/crates/ra_ide_api/src/imp.rs index 961f7b230..399433a01 100644 --- a/crates/ra_ide_api/src/imp.rs +++ b/crates/ra_ide_api/src/imp.rs | |||
@@ -4,8 +4,8 @@ use hir::{ | |||
4 | self, Problem, source_binder | 4 | self, Problem, source_binder |
5 | }; | 5 | }; |
6 | use ra_db::{ | 6 | use ra_db::{ |
7 | FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase, | 7 | SourceDatabase, SourceRoot, SourceRootId, |
8 | salsa::Database, | 8 | salsa::{Database, SweepStrategy}, |
9 | }; | 9 | }; |
10 | use ra_ide_api_light::{self, assists, LocalEdit, Severity}; | 10 | use ra_ide_api_light::{self, assists, LocalEdit, Severity}; |
11 | use ra_syntax::{ | 11 | use ra_syntax::{ |
@@ -76,14 +76,14 @@ impl db::RootDatabase { | |||
76 | /// syntax trees. However, if we actually do that, everything is recomputed | 76 | /// syntax trees. However, if we actually do that, everything is recomputed |
77 | /// for some reason. Needs investigation. | 77 | /// for some reason. Needs investigation. |
78 | pub(crate) fn collect_garbage(&mut self) { | 78 | pub(crate) fn collect_garbage(&mut self) { |
79 | self.query(ra_db::SourceFileQuery) | 79 | self.query(ra_db::ParseQuery) |
80 | .sweep(salsa::SweepStrategy::default().discard_values()); | 80 | .sweep(SweepStrategy::default().discard_values()); |
81 | self.query(hir::db::HirSourceFileQuery) | 81 | self.query(hir::db::HirParseQuery) |
82 | .sweep(salsa::SweepStrategy::default().discard_values()); | 82 | .sweep(SweepStrategy::default().discard_values()); |
83 | self.query(hir::db::FileItemsQuery) | 83 | self.query(hir::db::FileItemsQuery) |
84 | .sweep(salsa::SweepStrategy::default().discard_values()); | 84 | .sweep(SweepStrategy::default().discard_values()); |
85 | self.query(hir::db::FileItemQuery) | 85 | self.query(hir::db::FileItemQuery) |
86 | .sweep(salsa::SweepStrategy::default().discard_values()); | 86 | .sweep(SweepStrategy::default().discard_values()); |
87 | } | 87 | } |
88 | } | 88 | } |
89 | 89 | ||
@@ -102,7 +102,7 @@ impl db::RootDatabase { | |||
102 | } | 102 | } |
103 | 103 | ||
104 | pub(crate) fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> { | 104 | pub(crate) fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> { |
105 | let file = self.source_file(position.file_id); | 105 | let file = self.parse(position.file_id); |
106 | // Find the binding associated with the offset | 106 | // Find the binding associated with the offset |
107 | let (binding, descr) = match find_binding(self, &file, position) { | 107 | let (binding, descr) = match find_binding(self, &file, position) { |
108 | None => return Vec::new(), | 108 | None => return Vec::new(), |
@@ -150,7 +150,7 @@ impl db::RootDatabase { | |||
150 | } | 150 | } |
151 | 151 | ||
152 | pub(crate) fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> { | 152 | pub(crate) fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> { |
153 | let syntax = self.source_file(file_id); | 153 | let syntax = self.parse(file_id); |
154 | 154 | ||
155 | let mut res = ra_ide_api_light::diagnostics(&syntax) | 155 | let mut res = ra_ide_api_light::diagnostics(&syntax) |
156 | .into_iter() | 156 | .into_iter() |
@@ -214,7 +214,7 @@ impl db::RootDatabase { | |||
214 | } | 214 | } |
215 | 215 | ||
216 | pub(crate) fn assists(&self, frange: FileRange) -> Vec<SourceChange> { | 216 | pub(crate) fn assists(&self, frange: FileRange) -> Vec<SourceChange> { |
217 | let file = self.source_file(frange.file_id); | 217 | let file = self.parse(frange.file_id); |
218 | assists::assists(&file, frange.range) | 218 | assists::assists(&file, frange.range) |
219 | .into_iter() | 219 | .into_iter() |
220 | .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) | 220 | .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) |
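
The `collect_garbage` hunk leans on salsa's sweeping API: `SweepStrategy::default().discard_values()` drops a query's memoized values (here, whole syntax trees) while keeping its revision and dependency bookkeeping, so the next caller re-parses lazily instead of invalidating everything computed from those trees. A condensed sketch of the call shape, using the renamed queries from the hunk; treat it as illustrative rather than a drop-in:

    use ra_db::salsa::{Database, SweepStrategy};

    impl db::RootDatabase {
        /// Free the memory held by parse trees; dependency info is kept,
        /// so queries derived from unchanged files stay valid.
        pub(crate) fn sweep_syntax_trees(&mut self) {
            self.query(ra_db::ParseQuery)
                .sweep(SweepStrategy::default().discard_values());
            self.query(hir::db::HirParseQuery)
                .sweep(SweepStrategy::default().discard_values());
        }
    }
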
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index ffd026b04..43c8bea71 100644 --- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -34,7 +34,7 @@ use std::{fmt, sync::Arc}; | |||
34 | use ra_syntax::{SourceFile, TreeArc, TextRange, TextUnit}; | 34 | use ra_syntax::{SourceFile, TreeArc, TextRange, TextUnit}; |
35 | use ra_text_edit::TextEdit; | 35 | use ra_text_edit::TextEdit; |
36 | use ra_db::{ | 36 | use ra_db::{ |
37 | SyntaxDatabase, FilesDatabase, BaseDatabase, | 37 | SourceDatabase, CheckCanceled, |
38 | salsa::{self, ParallelDatabase}, | 38 | salsa::{self, ParallelDatabase}, |
39 | }; | 39 | }; |
40 | use rayon::prelude::*; | 40 | use rayon::prelude::*; |
@@ -313,7 +313,7 @@ impl Analysis { | |||
313 | 313 | ||
314 | /// Gets the syntax tree of the file. | 314 | /// Gets the syntax tree of the file. |
315 | pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> { | 315 | pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> { |
316 | self.db.source_file(file_id).clone() | 316 | self.db.parse(file_id).clone() |
317 | } | 317 | } |
318 | 318 | ||
319 | /// Gets the file's `LineIndex`: data structure to convert between absolute | 319 | /// Gets the file's `LineIndex`: data structure to convert between absolute |
@@ -330,21 +330,21 @@ impl Analysis { | |||
330 | /// Returns position of the matching brace (all types of braces are | 330 | /// Returns position of the matching brace (all types of braces are |
331 | /// supported). | 331 | /// supported). |
332 | pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> { | 332 | pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> { |
333 | let file = self.db.source_file(position.file_id); | 333 | let file = self.db.parse(position.file_id); |
334 | ra_ide_api_light::matching_brace(&file, position.offset) | 334 | ra_ide_api_light::matching_brace(&file, position.offset) |
335 | } | 335 | } |
336 | 336 | ||
337 | /// Returns a syntax tree represented as `String`, for debug purposes. | 337 | /// Returns a syntax tree represented as `String`, for debug purposes. |
338 | // FIXME: use a better name here. | 338 | // FIXME: use a better name here. |
339 | pub fn syntax_tree(&self, file_id: FileId) -> String { | 339 | pub fn syntax_tree(&self, file_id: FileId) -> String { |
340 | let file = self.db.source_file(file_id); | 340 | let file = self.db.parse(file_id); |
341 | ra_ide_api_light::syntax_tree(&file) | 341 | ra_ide_api_light::syntax_tree(&file) |
342 | } | 342 | } |
343 | 343 | ||
344 | /// Returns an edit to remove all newlines in the range, cleaning up minor | 344 | /// Returns an edit to remove all newlines in the range, cleaning up minor |
345 | /// stuff like trailing commas. | 345 | /// stuff like trailing commas. |
346 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { | 346 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { |
347 | let file = self.db.source_file(frange.file_id); | 347 | let file = self.db.parse(frange.file_id); |
348 | SourceChange::from_local_edit( | 348 | SourceChange::from_local_edit( |
349 | frange.file_id, | 349 | frange.file_id, |
350 | ra_ide_api_light::join_lines(&file, frange.range), | 350 | ra_ide_api_light::join_lines(&file, frange.range), |
@@ -354,7 +354,7 @@ impl Analysis { | |||
354 | /// Returns an edit which should be applied when opening a new line, fixing | 354 | /// Returns an edit which should be applied when opening a new line, fixing |
355 | /// up minor stuff like continuing the comment. | 355 | /// up minor stuff like continuing the comment. |
356 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { | 356 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { |
357 | let file = self.db.source_file(position.file_id); | 357 | let file = self.db.parse(position.file_id); |
358 | let edit = ra_ide_api_light::on_enter(&file, position.offset)?; | 358 | let edit = ra_ide_api_light::on_enter(&file, position.offset)?; |
359 | Some(SourceChange::from_local_edit(position.file_id, edit)) | 359 | Some(SourceChange::from_local_edit(position.file_id, edit)) |
360 | } | 360 | } |
@@ -363,14 +363,14 @@ impl Analysis { | |||
363 | /// this works when adding `let =`. | 363 | /// this works when adding `let =`. |
364 | // FIXME: use a snippet completion instead of this hack here. | 364 | // FIXME: use a snippet completion instead of this hack here. |
365 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { | 365 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { |
366 | let file = self.db.source_file(position.file_id); | 366 | let file = self.db.parse(position.file_id); |
367 | let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; | 367 | let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; |
368 | Some(SourceChange::from_local_edit(position.file_id, edit)) | 368 | Some(SourceChange::from_local_edit(position.file_id, edit)) |
369 | } | 369 | } |
370 | 370 | ||
371 | /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. | 371 | /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. |
372 | pub fn on_dot_typed(&self, position: FilePosition) -> Option<SourceChange> { | 372 | pub fn on_dot_typed(&self, position: FilePosition) -> Option<SourceChange> { |
373 | let file = self.db.source_file(position.file_id); | 373 | let file = self.db.parse(position.file_id); |
374 | let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; | 374 | let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; |
375 | Some(SourceChange::from_local_edit(position.file_id, edit)) | 375 | Some(SourceChange::from_local_edit(position.file_id, edit)) |
376 | } | 376 | } |
@@ -378,13 +378,13 @@ impl Analysis { | |||
378 | /// Returns a tree representation of symbols in the file. Useful to draw a | 378 | /// Returns a tree representation of symbols in the file. Useful to draw a |
379 | /// file outline. | 379 | /// file outline. |
380 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | 380 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { |
381 | let file = self.db.source_file(file_id); | 381 | let file = self.db.parse(file_id); |
382 | ra_ide_api_light::file_structure(&file) | 382 | ra_ide_api_light::file_structure(&file) |
383 | } | 383 | } |
384 | 384 | ||
385 | /// Returns the set of folding ranges. | 385 | /// Returns the set of folding ranges. |
386 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { | 386 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { |
387 | let file = self.db.source_file(file_id); | 387 | let file = self.db.parse(file_id); |
388 | ra_ide_api_light::folding_ranges(&file) | 388 | ra_ide_api_light::folding_ranges(&file) |
389 | } | 389 | } |
390 | 390 | ||
diff --git a/crates/ra_ide_api/src/marks.rs b/crates/ra_ide_api/src/marks.rs index dc5b2702a..e33bf6c91 100644 --- a/crates/ra_ide_api/src/marks.rs +++ b/crates/ra_ide_api/src/marks.rs | |||
@@ -1 +1,5 @@ | |||
1 | test_utils::marks!(inserts_parens_for_function_calls); | 1 | test_utils::marks!( |
2 | inserts_parens_for_function_calls | ||
3 | goto_definition_works_for_methods | ||
4 | goto_definition_works_for_fields | ||
5 | ); | ||
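
The `marks.rs` change is one leg of the "test marks" pattern that the goto-definition hunks above rely on: `test_utils::marks!` declares the named marks once (space-separated), production code fires `tested_by!(mark)` on the branch of interest, and the test opens with `covers!(mark)` so it fails if that branch never ran. A schematic sketch of how the three pieces fit; the `resolves_fields` mark and the function are invented, and the macro wiring is assumed to resolve marks through the crate's `marks` module as in the hunk:

    // src/marks.rs — declare once, space-separated, as above.
    test_utils::marks!(resolves_fields);

    // Production code: record that this branch actually executed.
    use test_utils::tested_by;

    fn resolve_field(name: &str) -> Option<u32> {
        if name == "spam" {
            tested_by!(resolves_fields);
            return Some(1);
        }
        None
    }

    #[cfg(test)]
    mod tests {
        use test_utils::covers;

        #[test]
        fn goto_definition_works_for_fields() {
            // Fails the test if the marked branch above was never hit.
            covers!(resolves_fields);
            assert_eq!(super::resolve_field("spam"), Some(1));
        }
    }
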
diff --git a/crates/ra_ide_api/src/navigation_target.rs b/crates/ra_ide_api/src/navigation_target.rs index c5be8e01b..d73d4afa7 100644 --- a/crates/ra_ide_api/src/navigation_target.rs +++ b/crates/ra_ide_api/src/navigation_target.rs | |||
@@ -3,7 +3,7 @@ use ra_syntax::{ | |||
3 | SyntaxNode, AstNode, SmolStr, TextRange, ast, | 3 | SyntaxNode, AstNode, SmolStr, TextRange, ast, |
4 | SyntaxKind::{self, NAME}, | 4 | SyntaxKind::{self, NAME}, |
5 | }; | 5 | }; |
6 | use hir::{ModuleSource}; | 6 | use hir::{ModuleSource, FieldSource}; |
7 | 7 | ||
8 | use crate::{FileSymbol, db::RootDatabase}; | 8 | use crate::{FileSymbol, db::RootDatabase}; |
9 | 9 | ||
@@ -101,62 +101,48 @@ impl NavigationTarget { | |||
101 | NavigationTarget::from_named(file_id.original_file(db), &*fn_def) | 101 | NavigationTarget::from_named(file_id.original_file(db), &*fn_def) |
102 | } | 102 | } |
103 | 103 | ||
104 | // TODO once Def::Item is gone, this should be able to always return a NavigationTarget | 104 | pub(crate) fn from_field(db: &RootDatabase, field: hir::StructField) -> NavigationTarget { |
105 | pub(crate) fn from_def( | 105 | let (file_id, field) = field.source(db); |
106 | db: &RootDatabase, | 106 | let file_id = file_id.original_file(db); |
107 | module_def: hir::ModuleDef, | 107 | match field { |
108 | ) -> Option<NavigationTarget> { | 108 | FieldSource::Named(it) => NavigationTarget::from_named(file_id, &*it), |
109 | FieldSource::Pos(it) => { | ||
110 | NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax()) | ||
111 | } | ||
112 | } | ||
113 | } | ||
114 | |||
115 | pub(crate) fn from_def(db: &RootDatabase, module_def: hir::ModuleDef) -> NavigationTarget { | ||
109 | match module_def { | 116 | match module_def { |
110 | hir::ModuleDef::Module(module) => Some(NavigationTarget::from_module(db, module)), | 117 | hir::ModuleDef::Module(module) => NavigationTarget::from_module(db, module), |
111 | hir::ModuleDef::Function(func) => Some(NavigationTarget::from_function(db, func)), | 118 | hir::ModuleDef::Function(func) => NavigationTarget::from_function(db, func), |
112 | hir::ModuleDef::Struct(s) => { | 119 | hir::ModuleDef::Struct(s) => { |
113 | let (file_id, node) = s.source(db); | 120 | let (file_id, node) = s.source(db); |
114 | Some(NavigationTarget::from_named( | 121 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
115 | file_id.original_file(db), | ||
116 | &*node, | ||
117 | )) | ||
118 | } | 122 | } |
119 | hir::ModuleDef::Const(s) => { | 123 | hir::ModuleDef::Const(s) => { |
120 | let (file_id, node) = s.source(db); | 124 | let (file_id, node) = s.source(db); |
121 | Some(NavigationTarget::from_named( | 125 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
122 | file_id.original_file(db), | ||
123 | &*node, | ||
124 | )) | ||
125 | } | 126 | } |
126 | hir::ModuleDef::Static(s) => { | 127 | hir::ModuleDef::Static(s) => { |
127 | let (file_id, node) = s.source(db); | 128 | let (file_id, node) = s.source(db); |
128 | Some(NavigationTarget::from_named( | 129 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
129 | file_id.original_file(db), | ||
130 | &*node, | ||
131 | )) | ||
132 | } | 130 | } |
133 | hir::ModuleDef::Enum(e) => { | 131 | hir::ModuleDef::Enum(e) => { |
134 | let (file_id, node) = e.source(db); | 132 | let (file_id, node) = e.source(db); |
135 | Some(NavigationTarget::from_named( | 133 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
136 | file_id.original_file(db), | ||
137 | &*node, | ||
138 | )) | ||
139 | } | 134 | } |
140 | hir::ModuleDef::EnumVariant(var) => { | 135 | hir::ModuleDef::EnumVariant(var) => { |
141 | let (file_id, node) = var.source(db); | 136 | let (file_id, node) = var.source(db); |
142 | Some(NavigationTarget::from_named( | 137 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
143 | file_id.original_file(db), | ||
144 | &*node, | ||
145 | )) | ||
146 | } | 138 | } |
147 | hir::ModuleDef::Trait(e) => { | 139 | hir::ModuleDef::Trait(e) => { |
148 | let (file_id, node) = e.source(db); | 140 | let (file_id, node) = e.source(db); |
149 | Some(NavigationTarget::from_named( | 141 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
150 | file_id.original_file(db), | ||
151 | &*node, | ||
152 | )) | ||
153 | } | 142 | } |
154 | hir::ModuleDef::Type(e) => { | 143 | hir::ModuleDef::Type(e) => { |
155 | let (file_id, node) = e.source(db); | 144 | let (file_id, node) = e.source(db); |
156 | Some(NavigationTarget::from_named( | 145 | NavigationTarget::from_named(file_id.original_file(db), &*node) |
157 | file_id.original_file(db), | ||
158 | &*node, | ||
159 | )) | ||
160 | } | 146 | } |
161 | } | 147 | } |
162 | } | 148 | } |
diff --git a/crates/ra_ide_api/src/rename.rs b/crates/ra_ide_api/src/rename.rs index 5b767addd..db5ccf969 100644 --- a/crates/ra_ide_api/src/rename.rs +++ b/crates/ra_ide_api/src/rename.rs | |||
@@ -17,7 +17,7 @@ use crate::{ | |||
17 | SourceChange, | 17 | SourceChange, |
18 | SourceFileEdit, | 18 | SourceFileEdit, |
19 | }; | 19 | }; |
20 | use ra_db::{FilesDatabase, SyntaxDatabase}; | 20 | use ra_db::SourceDatabase; |
21 | use relative_path::RelativePath; | 21 | use relative_path::RelativePath; |
22 | 22 | ||
23 | pub(crate) fn rename( | 23 | pub(crate) fn rename( |
@@ -25,7 +25,7 @@ pub(crate) fn rename( | |||
25 | position: FilePosition, | 25 | position: FilePosition, |
26 | new_name: &str, | 26 | new_name: &str, |
27 | ) -> Option<SourceChange> { | 27 | ) -> Option<SourceChange> { |
28 | let source_file = db.source_file(position.file_id); | 28 | let source_file = db.parse(position.file_id); |
29 | let syntax = source_file.syntax(); | 29 | let syntax = source_file.syntax(); |
30 | 30 | ||
31 | if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { | 31 | if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { |
diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs index 0f9f8deb3..dc8c40ea6 100644 --- a/crates/ra_ide_api/src/runnables.rs +++ b/crates/ra_ide_api/src/runnables.rs | |||
@@ -3,7 +3,7 @@ use ra_syntax::{ | |||
3 | TextRange, SyntaxNode, | 3 | TextRange, SyntaxNode, |
4 | ast::{self, AstNode, NameOwner, ModuleItemOwner}, | 4 | ast::{self, AstNode, NameOwner, ModuleItemOwner}, |
5 | }; | 5 | }; |
6 | use ra_db::SyntaxDatabase; | 6 | use ra_db::SourceDatabase; |
7 | 7 | ||
8 | use crate::{db::RootDatabase, FileId}; | 8 | use crate::{db::RootDatabase, FileId}; |
9 | 9 | ||
@@ -22,7 +22,7 @@ pub enum RunnableKind { | |||
22 | } | 22 | } |
23 | 23 | ||
24 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { | 24 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { |
25 | let source_file = db.source_file(file_id); | 25 | let source_file = db.parse(file_id); |
26 | source_file | 26 | source_file |
27 | .syntax() | 27 | .syntax() |
28 | .descendants() | 28 | .descendants() |
diff --git a/crates/ra_ide_api/src/status.rs b/crates/ra_ide_api/src/status.rs index 717537fcd..e11eed223 100644 --- a/crates/ra_ide_api/src/status.rs +++ b/crates/ra_ide_api/src/status.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | 6 | ||
7 | use ra_syntax::{AstNode, TreeArc, SourceFile}; | 7 | use ra_syntax::{AstNode, TreeArc, SourceFile}; |
8 | use ra_db::{ | 8 | use ra_db::{ |
9 | SourceFileQuery, FileTextQuery, SourceRootId, | 9 | ParseQuery, FileTextQuery, SourceRootId, |
10 | salsa::{Database, debug::{DebugQueryTable, TableEntry}}, | 10 | salsa::{Database, debug::{DebugQueryTable, TableEntry}}, |
11 | }; | 11 | }; |
12 | 12 | ||
@@ -17,7 +17,7 @@ use crate::{ | |||
17 | 17 | ||
18 | pub(crate) fn status(db: &RootDatabase) -> String { | 18 | pub(crate) fn status(db: &RootDatabase) -> String { |
19 | let files_stats = db.query(FileTextQuery).entries::<FilesStats>(); | 19 | let files_stats = db.query(FileTextQuery).entries::<FilesStats>(); |
20 | let syntax_tree_stats = db.query(SourceFileQuery).entries::<SyntaxTreeStats>(); | 20 | let syntax_tree_stats = db.query(ParseQuery).entries::<SyntaxTreeStats>(); |
21 | let symbols_stats = db | 21 | let symbols_stats = db |
22 | .query(LibrarySymbolsQuery) | 22 | .query(LibrarySymbolsQuery) |
23 | .entries::<LibrarySymbolsStats>(); | 23 | .entries::<LibrarySymbolsStats>(); |
@@ -94,17 +94,12 @@ impl FromIterator<TableEntry<FileId, TreeArc<SourceFile>>> for SyntaxTreeStats { | |||
94 | #[derive(Default)] | 94 | #[derive(Default)] |
95 | struct LibrarySymbolsStats { | 95 | struct LibrarySymbolsStats { |
96 | total: usize, | 96 | total: usize, |
97 | fst_size: Bytes, | 97 | size: Bytes, |
98 | symbols_size: Bytes, | ||
99 | } | 98 | } |
100 | 99 | ||
101 | impl fmt::Display for LibrarySymbolsStats { | 100 | impl fmt::Display for LibrarySymbolsStats { |
102 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 101 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { |
103 | write!( | 102 | write!(fmt, "{} ({}) symbols", self.total, self.size,) |
104 | fmt, | ||
105 | "{} ({} + {}) symbols", | ||
106 | self.total, self.fst_size, self.symbols_size | ||
107 | ) | ||
108 | } | 103 | } |
109 | } | 104 | } |
110 | 105 | ||
@@ -117,8 +112,7 @@ impl FromIterator<TableEntry<SourceRootId, Arc<SymbolIndex>>> for LibrarySymbols | |||
117 | for entry in iter { | 112 | for entry in iter { |
118 | let value = entry.value.unwrap(); | 113 | let value = entry.value.unwrap(); |
119 | res.total += value.len(); | 114 | res.total += value.len(); |
120 | res.fst_size += value.fst_size(); | 115 | res.size += value.memory_size(); |
121 | res.symbols_size += value.symbols_size(); | ||
122 | } | 116 | } |
123 | res | 117 | res |
124 | } | 118 | } |
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs index 69a669b8d..72c93f530 100644 --- a/crates/ra_ide_api/src/symbol_index.rs +++ b/crates/ra_ide_api/src/symbol_index.rs | |||
@@ -34,7 +34,7 @@ use ra_syntax::{ | |||
34 | ast::{self, NameOwner}, | 34 | ast::{self, NameOwner}, |
35 | }; | 35 | }; |
36 | use ra_db::{ | 36 | use ra_db::{ |
37 | SourceRootId, FilesDatabase, | 37 | SourceRootId, SourceDatabase, |
38 | salsa::{self, ParallelDatabase}, | 38 | salsa::{self, ParallelDatabase}, |
39 | }; | 39 | }; |
40 | use rayon::prelude::*; | 40 | use rayon::prelude::*; |
@@ -44,16 +44,24 @@ use crate::{ | |||
44 | db::RootDatabase, | 44 | db::RootDatabase, |
45 | }; | 45 | }; |
46 | 46 | ||
47 | #[salsa::query_group] | 47 | #[salsa::query_group(SymbolsDatabaseStorage)] |
48 | pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { | 48 | pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { |
49 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; | 49 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; |
50 | #[salsa::input] | 50 | #[salsa::input] |
51 | fn library_symbols(&self, id: SourceRootId) -> Arc<SymbolIndex>; | 51 | fn library_symbols(&self, id: SourceRootId) -> Arc<SymbolIndex>; |
52 | /// The set of "local" (that is, from the current workspace) roots. | ||
53 | /// Files in local roots are assumed to change frequently. | ||
54 | #[salsa::input] | ||
55 | fn local_roots(&self) -> Arc<Vec<SourceRootId>>; | ||
56 | /// The set of roots for crates.io libraries. | ||
57 | /// Files in libraries are assumed to never change. | ||
58 | #[salsa::input] | ||
59 | fn library_roots(&self) -> Arc<Vec<SourceRootId>>; | ||
52 | } | 60 | } |
53 | 61 | ||
54 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { | 62 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { |
55 | db.check_canceled(); | 63 | db.check_canceled(); |
56 | let source_file = db.source_file(file_id); | 64 | let source_file = db.parse(file_id); |
57 | let mut symbols = source_file | 65 | let mut symbols = source_file |
58 | .syntax() | 66 | .syntax() |
59 | .descendants() | 67 | .descendants() |
@@ -137,12 +145,8 @@ impl SymbolIndex { | |||
137 | self.symbols.len() | 145 | self.symbols.len() |
138 | } | 146 | } |
139 | 147 | ||
140 | pub(crate) fn fst_size(&self) -> usize { | 148 | pub(crate) fn memory_size(&self) -> usize { |
141 | self.map.as_fst().size() | 149 | self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>() |
142 | } | ||
143 | |||
144 | pub(crate) fn symbols_size(&self) -> usize { | ||
145 | self.symbols.len() * mem::size_of::<FileSymbol>() | ||
146 | } | 150 | } |
147 | 151 | ||
148 | pub(crate) fn for_files( | 152 | pub(crate) fn for_files( |
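
`SymbolIndex::memory_size` folds the two statistics it replaces (FST size and symbol storage) into one figure, which is what the simplified `LibrarySymbolsStats` display above now prints. The estimate is simply the FST's byte length plus `len * size_of::<FileSymbol>()`; a tiny self-contained illustration of that kind of accounting, with an invented `Symbol` standing in for `FileSymbol`:

    use std::mem;

    // Stand-in for FileSymbol; only its inline size matters here.
    struct Symbol {
        name: String,
        file_id: u32,
        range: (u32, u32),
    }

    struct Index {
        fst_bytes: Vec<u8>, // stand-in for the fst::Map backing buffer
        symbols: Vec<Symbol>,
    }

    impl Index {
        /// Rough footprint: FST buffer plus the inline size of each symbol.
        /// Heap data behind each symbol (e.g. long names) is not counted,
        /// matching the approximation used by `SymbolIndex::memory_size`.
        fn memory_size(&self) -> usize {
            self.fst_bytes.len() + self.symbols.len() * mem::size_of::<Symbol>()
        }
    }

    fn main() {
        let index = Index {
            fst_bytes: vec![0; 1024],
            symbols: (0..10)
                .map(|i| Symbol { name: format!("sym{}", i), file_id: 0, range: (0, 0) })
                .collect(),
        };
        println!("~{} bytes", index.memory_size());
    }
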
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index a4d3ad005..26bde495b 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | use ra_syntax::{ast, AstNode,}; | 1 | use ra_syntax::{ast, AstNode,}; |
2 | use ra_db::SyntaxDatabase; | 2 | use ra_db::SourceDatabase; |
3 | 3 | ||
4 | use crate::{ | 4 | use crate::{ |
5 | FileId, HighlightedRange, | 5 | FileId, HighlightedRange, |
@@ -7,7 +7,7 @@ use crate::{ | |||
7 | }; | 7 | }; |
8 | 8 | ||
9 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { | 9 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { |
10 | let source_file = db.source_file(file_id); | 10 | let source_file = db.parse(file_id); |
11 | let mut res = ra_ide_api_light::highlight(source_file.syntax()); | 11 | let mut res = ra_ide_api_light::highlight(source_file.syntax()); |
12 | for macro_call in source_file | 12 | for macro_call in source_file |
13 | .syntax() | 13 | .syntax() |
diff --git a/crates/ra_ide_api_light/src/extend_selection.rs b/crates/ra_ide_api_light/src/extend_selection.rs index db93db208..f396dfe3f 100644 --- a/crates/ra_ide_api_light/src/extend_selection.rs +++ b/crates/ra_ide_api_light/src/extend_selection.rs | |||
@@ -9,9 +9,9 @@ pub fn extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange | |||
9 | let list_kinds = [ | 9 | let list_kinds = [ |
10 | FIELD_PAT_LIST, | 10 | FIELD_PAT_LIST, |
11 | MATCH_ARM_LIST, | 11 | MATCH_ARM_LIST, |
12 | NAMED_FIELD_LIST, | ||
13 | NAMED_FIELD_DEF_LIST, | 12 | NAMED_FIELD_DEF_LIST, |
14 | POS_FIELD_LIST, | 13 | POS_FIELD_DEF_LIST, |
14 | NAMED_FIELD_LIST, | ||
15 | ENUM_VARIANT_LIST, | 15 | ENUM_VARIANT_LIST, |
16 | USE_TREE_LIST, | 16 | USE_TREE_LIST, |
17 | TYPE_PARAM_LIST, | 17 | TYPE_PARAM_LIST, |
diff --git a/crates/ra_ide_api_light/src/formatting.rs b/crates/ra_ide_api_light/src/formatting.rs index ca0fdb928..1f34b85d6 100644 --- a/crates/ra_ide_api_light/src/formatting.rs +++ b/crates/ra_ide_api_light/src/formatting.rs | |||
@@ -7,9 +7,22 @@ use ra_syntax::{ | |||
7 | 7 | ||
8 | /// If the node is on the beginning of the line, calculate indent. | 8 | /// If the node is on the beginning of the line, calculate indent. |
9 | pub(crate) fn leading_indent(node: &SyntaxNode) -> Option<&str> { | 9 | pub(crate) fn leading_indent(node: &SyntaxNode) -> Option<&str> { |
10 | let prev = prev_leaf(node)?; | 10 | for leaf in prev_leaves(node) { |
11 | let ws_text = ast::Whitespace::cast(prev)?.text(); | 11 | if let Some(ws) = ast::Whitespace::cast(leaf) { |
12 | ws_text.rfind('\n').map(|pos| &ws_text[pos + 1..]) | 12 | let ws_text = ws.text(); |
13 | if let Some(pos) = ws_text.rfind('\n') { | ||
14 | return Some(&ws_text[pos + 1..]); | ||
15 | } | ||
16 | } | ||
17 | if leaf.leaf_text().unwrap().contains('\n') { | ||
18 | break; | ||
19 | } | ||
20 | } | ||
21 | None | ||
22 | } | ||
23 | |||
24 | fn prev_leaves(node: &SyntaxNode) -> impl Iterator<Item = &SyntaxNode> { | ||
25 | generate(prev_leaf(node), |&node| prev_leaf(node)) | ||
13 | } | 26 | } |
14 | 27 | ||
15 | fn prev_leaf(node: &SyntaxNode) -> Option<&SyntaxNode> { | 28 | fn prev_leaf(node: &SyntaxNode) -> Option<&SyntaxNode> { |
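
The rewritten `leading_indent` no longer looks only at the node's immediately preceding leaf: it walks backwards through previous leaves until it finds a whitespace leaf containing a newline and returns the text after the last `\n` (the line's indentation), giving up only when some earlier leaf itself spans a newline. The net effect is that a node reports its line's indentation even when other tokens precede it on that line. A string-level analogue of that rule, with invented names and without the multi-line-token bail-out that a flat string cannot distinguish:

    /// Indentation of the line on which `offset` sits: the whitespace run
    /// following the last newline before `offset`. Returns None when the
    /// offset is on the first line (no preceding newline at all).
    fn leading_indent_at(text: &str, offset: usize) -> Option<&str> {
        let line_start = text[..offset].rfind('\n')? + 1;
        let rest = &text[line_start..offset];
        let indent_len = rest.len() - rest.trim_start().len();
        Some(&rest[..indent_len])
    }

    fn main() {
        let src = "fn f() {\n    let x = 1;\n}\n";
        // A node in the middle of the second line still reports "    " …
        let offset = src.find('x').unwrap();
        assert_eq!(leading_indent_at(src, offset), Some("    "));
        // … while anything on the first line has no preceding newline.
        let offset = src.find("f(").unwrap();
        assert_eq!(leading_indent_at(src, offset), None);
    }
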
diff --git a/crates/ra_ide_api_light/src/snapshots/tests__file_structure.snap b/crates/ra_ide_api_light/src/snapshots/tests__file_structure.snap index b96398950..270f75a56 100644 --- a/crates/ra_ide_api_light/src/snapshots/tests__file_structure.snap +++ b/crates/ra_ide_api_light/src/snapshots/tests__file_structure.snap | |||
@@ -1,8 +1,8 @@ | |||
1 | --- | 1 | --- |
2 | created: "2019-01-24T18:04:00.090162+00:00" | 2 | created: "2019-01-26T07:11:02.463391362+00:00" |
3 | creator: insta@0.4.0 | 3 | creator: insta@0.5.2 |
4 | expression: structure | 4 | expression: structure |
5 | source: "crates\\ra_ide_api_light\\src\\structure.rs" | 5 | source: crates/ra_ide_api_light/src/structure.rs |
6 | --- | 6 | --- |
7 | [ | 7 | [ |
8 | StructureNode { | 8 | StructureNode { |
@@ -78,6 +78,26 @@ source: "crates\\ra_ide_api_light\\src\\structure.rs" | |||
78 | detail: None | 78 | detail: None |
79 | }, | 79 | }, |
80 | StructureNode { | 80 | StructureNode { |
81 | parent: Some( | ||
82 | 6 | ||
83 | ), | ||
84 | label: "X", | ||
85 | navigation_range: [169; 170), | ||
86 | node_range: [169; 170), | ||
87 | kind: ENUM_VARIANT, | ||
88 | detail: None | ||
89 | }, | ||
90 | StructureNode { | ||
91 | parent: Some( | ||
92 | 6 | ||
93 | ), | ||
94 | label: "Y", | ||
95 | navigation_range: [172; 173), | ||
96 | node_range: [172; 178), | ||
97 | kind: ENUM_VARIANT, | ||
98 | detail: None | ||
99 | }, | ||
100 | StructureNode { | ||
81 | parent: None, | 101 | parent: None, |
82 | label: "T", | 102 | label: "T", |
83 | navigation_range: [186; 187), | 103 | navigation_range: [186; 187), |
diff --git a/crates/ra_ide_api_light/src/structure.rs b/crates/ra_ide_api_light/src/structure.rs index e3713c217..4e080ed03 100644 --- a/crates/ra_ide_api_light/src/structure.rs +++ b/crates/ra_ide_api_light/src/structure.rs | |||
@@ -103,6 +103,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> { | |||
103 | .visit(decl::<ast::StructDef>) | 103 | .visit(decl::<ast::StructDef>) |
104 | .visit(|nfd: &ast::NamedFieldDef| decl_with_type_ref(nfd, nfd.type_ref())) | 104 | .visit(|nfd: &ast::NamedFieldDef| decl_with_type_ref(nfd, nfd.type_ref())) |
105 | .visit(decl::<ast::EnumDef>) | 105 | .visit(decl::<ast::EnumDef>) |
106 | .visit(decl::<ast::EnumVariant>) | ||
106 | .visit(decl::<ast::TraitDef>) | 107 | .visit(decl::<ast::TraitDef>) |
107 | .visit(decl::<ast::Module>) | 108 | .visit(decl::<ast::Module>) |
108 | .visit(|td: &ast::TypeDef| decl_with_type_ref(td, td.type_ref())) | 109 | .visit(|td: &ast::TypeDef| decl_with_type_ref(td, td.type_ref())) |
diff --git a/crates/ra_ide_api_light/src/typing.rs b/crates/ra_ide_api_light/src/typing.rs index 5ff2b7c1f..861027b9f 100644 --- a/crates/ra_ide_api_light/src/typing.rs +++ b/crates/ra_ide_api_light/src/typing.rs | |||
@@ -296,6 +296,46 @@ fn foo() { | |||
296 | } | 296 | } |
297 | 297 | ||
298 | #[test] | 298 | #[test] |
299 | fn indents_middle_of_chain_call() { | ||
300 | type_dot( | ||
301 | r" | ||
302 | fn source_impl() { | ||
303 | let var = enum_defvariant_list().unwrap() | ||
304 | <|> | ||
305 | .nth(92) | ||
306 | .unwrap(); | ||
307 | } | ||
308 | ", | ||
309 | r" | ||
310 | fn source_impl() { | ||
311 | let var = enum_defvariant_list().unwrap() | ||
312 | . | ||
313 | .nth(92) | ||
314 | .unwrap(); | ||
315 | } | ||
316 | ", | ||
317 | ); | ||
318 | type_dot( | ||
319 | r" | ||
320 | fn source_impl() { | ||
321 | let var = enum_defvariant_list().unwrap() | ||
322 | <|> | ||
323 | .nth(92) | ||
324 | .unwrap(); | ||
325 | } | ||
326 | ", | ||
327 | r" | ||
328 | fn source_impl() { | ||
329 | let var = enum_defvariant_list().unwrap() | ||
330 | . | ||
331 | .nth(92) | ||
332 | .unwrap(); | ||
333 | } | ||
334 | ", | ||
335 | ); | ||
336 | } | ||
337 | |||
338 | #[test] | ||
299 | fn dont_indent_freestanding_dot() { | 339 | fn dont_indent_freestanding_dot() { |
300 | type_dot( | 340 | type_dot( |
301 | r" | 341 | r" |
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs index 165b308d1..00c60ebf3 100644 --- a/crates/ra_syntax/src/ast.rs +++ b/crates/ra_syntax/src/ast.rs | |||
@@ -407,7 +407,7 @@ impl<'a, N: AstNode + 'a> Iterator for AstChildren<'a, N> { | |||
407 | 407 | ||
408 | #[derive(Debug, Clone, PartialEq, Eq)] | 408 | #[derive(Debug, Clone, PartialEq, Eq)] |
409 | pub enum StructFlavor<'a> { | 409 | pub enum StructFlavor<'a> { |
410 | Tuple(&'a PosFieldList), | 410 | Tuple(&'a PosFieldDefList), |
411 | Named(&'a NamedFieldDefList), | 411 | Named(&'a NamedFieldDefList), |
412 | Unit, | 412 | Unit, |
413 | } | 413 | } |
@@ -416,7 +416,7 @@ impl StructFlavor<'_> { | |||
416 | fn from_node<N: AstNode>(node: &N) -> StructFlavor { | 416 | fn from_node<N: AstNode>(node: &N) -> StructFlavor { |
417 | if let Some(nfdl) = child_opt::<_, NamedFieldDefList>(node) { | 417 | if let Some(nfdl) = child_opt::<_, NamedFieldDefList>(node) { |
418 | StructFlavor::Named(nfdl) | 418 | StructFlavor::Named(nfdl) |
419 | } else if let Some(pfl) = child_opt::<_, PosFieldList>(node) { | 419 | } else if let Some(pfl) = child_opt::<_, PosFieldDefList>(node) { |
420 | StructFlavor::Tuple(pfl) | 420 | StructFlavor::Tuple(pfl) |
421 | } else { | 421 | } else { |
422 | StructFlavor::Unit | 422 | StructFlavor::Unit |
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs index 7da19d782..3ace6533c 100644 --- a/crates/ra_syntax/src/ast/generated.rs +++ b/crates/ra_syntax/src/ast/generated.rs | |||
@@ -2248,6 +2248,7 @@ impl ToOwned for NamedFieldDef { | |||
2248 | impl ast::VisibilityOwner for NamedFieldDef {} | 2248 | impl ast::VisibilityOwner for NamedFieldDef {} |
2249 | impl ast::NameOwner for NamedFieldDef {} | 2249 | impl ast::NameOwner for NamedFieldDef {} |
2250 | impl ast::AttrsOwner for NamedFieldDef {} | 2250 | impl ast::AttrsOwner for NamedFieldDef {} |
2251 | impl ast::DocCommentsOwner for NamedFieldDef {} | ||
2251 | impl NamedFieldDef { | 2252 | impl NamedFieldDef { |
2252 | pub fn type_ref(&self) -> Option<&TypeRef> { | 2253 | pub fn type_ref(&self) -> Option<&TypeRef> { |
2253 | super::child_opt(self) | 2254 | super::child_opt(self) |
@@ -2850,68 +2851,68 @@ impl PointerType { | |||
2850 | } | 2851 | } |
2851 | } | 2852 | } |
2852 | 2853 | ||
2853 | // PosField | 2854 | // PosFieldDef |
2854 | #[derive(Debug, PartialEq, Eq, Hash)] | 2855 | #[derive(Debug, PartialEq, Eq, Hash)] |
2855 | #[repr(transparent)] | 2856 | #[repr(transparent)] |
2856 | pub struct PosField { | 2857 | pub struct PosFieldDef { |
2857 | pub(crate) syntax: SyntaxNode, | 2858 | pub(crate) syntax: SyntaxNode, |
2858 | } | 2859 | } |
2859 | unsafe impl TransparentNewType for PosField { | 2860 | unsafe impl TransparentNewType for PosFieldDef { |
2860 | type Repr = rowan::SyntaxNode<RaTypes>; | 2861 | type Repr = rowan::SyntaxNode<RaTypes>; |
2861 | } | 2862 | } |
2862 | 2863 | ||
2863 | impl AstNode for PosField { | 2864 | impl AstNode for PosFieldDef { |
2864 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 2865 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { |
2865 | match syntax.kind() { | 2866 | match syntax.kind() { |
2866 | POS_FIELD => Some(PosField::from_repr(syntax.into_repr())), | 2867 | POS_FIELD_DEF => Some(PosFieldDef::from_repr(syntax.into_repr())), |
2867 | _ => None, | 2868 | _ => None, |
2868 | } | 2869 | } |
2869 | } | 2870 | } |
2870 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 2871 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
2871 | } | 2872 | } |
2872 | 2873 | ||
2873 | impl ToOwned for PosField { | 2874 | impl ToOwned for PosFieldDef { |
2874 | type Owned = TreeArc<PosField>; | 2875 | type Owned = TreeArc<PosFieldDef>; |
2875 | fn to_owned(&self) -> TreeArc<PosField> { TreeArc::cast(self.syntax.to_owned()) } | 2876 | fn to_owned(&self) -> TreeArc<PosFieldDef> { TreeArc::cast(self.syntax.to_owned()) } |
2876 | } | 2877 | } |
2877 | 2878 | ||
2878 | 2879 | ||
2879 | impl ast::VisibilityOwner for PosField {} | 2880 | impl ast::VisibilityOwner for PosFieldDef {} |
2880 | impl ast::AttrsOwner for PosField {} | 2881 | impl ast::AttrsOwner for PosFieldDef {} |
2881 | impl PosField { | 2882 | impl PosFieldDef { |
2882 | pub fn type_ref(&self) -> Option<&TypeRef> { | 2883 | pub fn type_ref(&self) -> Option<&TypeRef> { |
2883 | super::child_opt(self) | 2884 | super::child_opt(self) |
2884 | } | 2885 | } |
2885 | } | 2886 | } |
2886 | 2887 | ||
2887 | // PosFieldList | 2888 | // PosFieldDefList |
2888 | #[derive(Debug, PartialEq, Eq, Hash)] | 2889 | #[derive(Debug, PartialEq, Eq, Hash)] |
2889 | #[repr(transparent)] | 2890 | #[repr(transparent)] |
2890 | pub struct PosFieldList { | 2891 | pub struct PosFieldDefList { |
2891 | pub(crate) syntax: SyntaxNode, | 2892 | pub(crate) syntax: SyntaxNode, |
2892 | } | 2893 | } |
2893 | unsafe impl TransparentNewType for PosFieldList { | 2894 | unsafe impl TransparentNewType for PosFieldDefList { |
2894 | type Repr = rowan::SyntaxNode<RaTypes>; | 2895 | type Repr = rowan::SyntaxNode<RaTypes>; |
2895 | } | 2896 | } |
2896 | 2897 | ||
2897 | impl AstNode for PosFieldList { | 2898 | impl AstNode for PosFieldDefList { |
2898 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { | 2899 | fn cast(syntax: &SyntaxNode) -> Option<&Self> { |
2899 | match syntax.kind() { | 2900 | match syntax.kind() { |
2900 | POS_FIELD_LIST => Some(PosFieldList::from_repr(syntax.into_repr())), | 2901 | POS_FIELD_DEF_LIST => Some(PosFieldDefList::from_repr(syntax.into_repr())), |
2901 | _ => None, | 2902 | _ => None, |
2902 | } | 2903 | } |
2903 | } | 2904 | } |
2904 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 2905 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
2905 | } | 2906 | } |
2906 | 2907 | ||
2907 | impl ToOwned for PosFieldList { | 2908 | impl ToOwned for PosFieldDefList { |
2908 | type Owned = TreeArc<PosFieldList>; | 2909 | type Owned = TreeArc<PosFieldDefList>; |
2909 | fn to_owned(&self) -> TreeArc<PosFieldList> { TreeArc::cast(self.syntax.to_owned()) } | 2910 | fn to_owned(&self) -> TreeArc<PosFieldDefList> { TreeArc::cast(self.syntax.to_owned()) } |
2910 | } | 2911 | } |
2911 | 2912 | ||
2912 | 2913 | ||
2913 | impl PosFieldList { | 2914 | impl PosFieldDefList { |
2914 | pub fn fields(&self) -> impl Iterator<Item = &PosField> { | 2915 | pub fn fields(&self) -> impl Iterator<Item = &PosFieldDef> { |
2915 | super::children(self) | 2916 | super::children(self) |
2916 | } | 2917 | } |
2917 | } | 2918 | } |
diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index cb5d6cde8..85fc79038 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron | |||
@@ -205,8 +205,8 @@ Grammar( | |||
205 | "ENUM_VARIANT", | 205 | "ENUM_VARIANT", |
206 | "NAMED_FIELD_DEF_LIST", | 206 | "NAMED_FIELD_DEF_LIST", |
207 | "NAMED_FIELD_DEF", | 207 | "NAMED_FIELD_DEF", |
208 | "POS_FIELD_LIST", | 208 | "POS_FIELD_DEF_LIST", |
209 | "POS_FIELD", | 209 | "POS_FIELD_DEF", |
210 | "ENUM_VARIANT_LIST", | 210 | "ENUM_VARIANT_LIST", |
211 | "ITEM_LIST", | 211 | "ITEM_LIST", |
212 | "ATTR", | 212 | "ATTR", |
@@ -268,9 +268,9 @@ Grammar( | |||
268 | ] | 268 | ] |
269 | ), | 269 | ), |
270 | "NamedFieldDefList": (collections: [["fields", "NamedFieldDef"]]), | 270 | "NamedFieldDefList": (collections: [["fields", "NamedFieldDef"]]), |
271 | "NamedFieldDef": ( traits: ["VisibilityOwner", "NameOwner", "AttrsOwner"], options: ["TypeRef"] ), | 271 | "NamedFieldDef": ( traits: ["VisibilityOwner", "NameOwner", "AttrsOwner", "DocCommentsOwner"], options: ["TypeRef"] ), |
272 | "PosFieldList": (collections: [["fields", "PosField"]]), | 272 | "PosFieldDefList": (collections: [["fields", "PosFieldDef"]]), |
273 | "PosField": ( traits: ["VisibilityOwner", "AttrsOwner"], options: ["TypeRef"]), | 273 | "PosFieldDef": ( traits: ["VisibilityOwner", "AttrsOwner"], options: ["TypeRef"]), |
274 | "EnumDef": ( traits: [ | 274 | "EnumDef": ( traits: [ |
275 | "VisibilityOwner", | 275 | "VisibilityOwner", |
276 | "NameOwner", | 276 | "NameOwner", |
diff --git a/crates/ra_syntax/src/grammar/items/nominal.rs b/crates/ra_syntax/src/grammar/items/nominal.rs index 0784fb7b1..ff9b38f9c 100644 --- a/crates/ra_syntax/src/grammar/items/nominal.rs +++ b/crates/ra_syntax/src/grammar/items/nominal.rs | |||
@@ -28,7 +28,7 @@ pub(super) fn struct_def(p: &mut Parser, kind: SyntaxKind) { | |||
28 | } | 28 | } |
29 | L_CURLY => named_field_def_list(p), | 29 | L_CURLY => named_field_def_list(p), |
30 | L_PAREN if kind == STRUCT_KW => { | 30 | L_PAREN if kind == STRUCT_KW => { |
31 | pos_field_list(p); | 31 | pos_field_def_list(p); |
32 | // test tuple_struct_where | 32 | // test tuple_struct_where |
33 | // struct Test<T>(T) where T: Clone; | 33 | // struct Test<T>(T) where T: Clone; |
34 | // struct Test<T>(T); | 34 | // struct Test<T>(T); |
@@ -74,7 +74,7 @@ pub(crate) fn enum_variant_list(p: &mut Parser) { | |||
74 | name(p); | 74 | name(p); |
75 | match p.current() { | 75 | match p.current() { |
76 | L_CURLY => named_field_def_list(p), | 76 | L_CURLY => named_field_def_list(p), |
77 | L_PAREN => pos_field_list(p), | 77 | L_PAREN => pos_field_def_list(p), |
78 | EQ => { | 78 | EQ => { |
79 | p.bump(); | 79 | p.bump(); |
80 | expressions::expr(p); | 80 | expressions::expr(p); |
@@ -132,7 +132,7 @@ pub(crate) fn named_field_def_list(p: &mut Parser) { | |||
132 | } | 132 | } |
133 | } | 133 | } |
134 | 134 | ||
135 | fn pos_field_list(p: &mut Parser) { | 135 | fn pos_field_def_list(p: &mut Parser) { |
136 | assert!(p.at(L_PAREN)); | 136 | assert!(p.at(L_PAREN)); |
137 | let m = p.start(); | 137 | let m = p.start(); |
138 | if !p.expect(L_PAREN) { | 138 | if !p.expect(L_PAREN) { |
@@ -157,12 +157,12 @@ fn pos_field_list(p: &mut Parser) { | |||
157 | break; | 157 | break; |
158 | } | 158 | } |
159 | types::type_(p); | 159 | types::type_(p); |
160 | m.complete(p, POS_FIELD); | 160 | m.complete(p, POS_FIELD_DEF); |
161 | 161 | ||
162 | if !p.at(R_PAREN) { | 162 | if !p.at(R_PAREN) { |
163 | p.expect(COMMA); | 163 | p.expect(COMMA); |
164 | } | 164 | } |
165 | } | 165 | } |
166 | p.expect(R_PAREN); | 166 | p.expect(R_PAREN); |
167 | m.complete(p, POS_FIELD_LIST); | 167 | m.complete(p, POS_FIELD_DEF_LIST); |
168 | } | 168 | } |
diff --git a/crates/ra_syntax/src/parser_impl/event.rs b/crates/ra_syntax/src/parser_impl/event.rs index bd77fee89..33e10ef85 100644 --- a/crates/ra_syntax/src/parser_impl/event.rs +++ b/crates/ra_syntax/src/parser_impl/event.rs | |||
@@ -250,7 +250,7 @@ fn n_attached_trivias<'a>( | |||
250 | ) -> usize { | 250 | ) -> usize { |
251 | match kind { | 251 | match kind { |
252 | CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF | 252 | CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF |
253 | | MODULE => { | 253 | | MODULE | NAMED_FIELD_DEF => { |
254 | let mut res = 0; | 254 | let mut res = 0; |
255 | for (i, (kind, text)) in trivias.enumerate() { | 255 | for (i, (kind, text)) in trivias.enumerate() { |
256 | match kind { | 256 | match kind { |
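
This one-entry addition to the trivia-attachment list (`NAMED_FIELD_DEF`) is what lets `///` comments stick to struct fields instead of floating as loose trivia; combined with the `DocCommentsOwner` impl for `NamedFieldDef` earlier in this diff, it is why the struct-field completion snapshot above now carries `Documentation("This is the_field")`. A small piece of source that exercises the new behaviour; the exact test fixture is not shown in the diff, so the names here only mirror the snapshot:

    struct A {
        /// This is the_field
        the_field: (u32,),
    }

    impl A {
        fn foo(self) {
            // Completing after `self.` now offers `the_field` together with
            // the doc text above, because the `///` comment is attached to
            // the NAMED_FIELD_DEF node in the syntax tree.
            let _ = self.the_field;
        }
    }
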
diff --git a/crates/ra_syntax/src/syntax_kinds/generated.rs b/crates/ra_syntax/src/syntax_kinds/generated.rs index 06faf7557..14f610891 100644 --- a/crates/ra_syntax/src/syntax_kinds/generated.rs +++ b/crates/ra_syntax/src/syntax_kinds/generated.rs | |||
@@ -195,8 +195,8 @@ pub enum SyntaxKind { | |||
195 | ENUM_VARIANT, | 195 | ENUM_VARIANT, |
196 | NAMED_FIELD_DEF_LIST, | 196 | NAMED_FIELD_DEF_LIST, |
197 | NAMED_FIELD_DEF, | 197 | NAMED_FIELD_DEF, |
198 | POS_FIELD_LIST, | 198 | POS_FIELD_DEF_LIST, |
199 | POS_FIELD, | 199 | POS_FIELD_DEF, |
200 | ENUM_VARIANT_LIST, | 200 | ENUM_VARIANT_LIST, |
201 | ITEM_LIST, | 201 | ITEM_LIST, |
202 | ATTR, | 202 | ATTR, |
@@ -458,8 +458,8 @@ impl SyntaxKind { | |||
458 | ENUM_VARIANT => &SyntaxInfo { name: "ENUM_VARIANT" }, | 458 | ENUM_VARIANT => &SyntaxInfo { name: "ENUM_VARIANT" }, |
459 | NAMED_FIELD_DEF_LIST => &SyntaxInfo { name: "NAMED_FIELD_DEF_LIST" }, | 459 | NAMED_FIELD_DEF_LIST => &SyntaxInfo { name: "NAMED_FIELD_DEF_LIST" }, |
460 | NAMED_FIELD_DEF => &SyntaxInfo { name: "NAMED_FIELD_DEF" }, | 460 | NAMED_FIELD_DEF => &SyntaxInfo { name: "NAMED_FIELD_DEF" }, |
461 | POS_FIELD_LIST => &SyntaxInfo { name: "POS_FIELD_LIST" }, | 461 | POS_FIELD_DEF_LIST => &SyntaxInfo { name: "POS_FIELD_DEF_LIST" }, |
462 | POS_FIELD => &SyntaxInfo { name: "POS_FIELD" }, | 462 | POS_FIELD_DEF => &SyntaxInfo { name: "POS_FIELD_DEF" }, |
463 | ENUM_VARIANT_LIST => &SyntaxInfo { name: "ENUM_VARIANT_LIST" }, | 463 | ENUM_VARIANT_LIST => &SyntaxInfo { name: "ENUM_VARIANT_LIST" }, |
464 | ITEM_LIST => &SyntaxInfo { name: "ITEM_LIST" }, | 464 | ITEM_LIST => &SyntaxInfo { name: "ITEM_LIST" }, |
465 | ATTR => &SyntaxInfo { name: "ATTR" }, | 465 | ATTR => &SyntaxInfo { name: "ATTR" }, |
diff --git a/crates/ra_syntax/tests/data/parser/err/0013_invalid_type.txt b/crates/ra_syntax/tests/data/parser/err/0013_invalid_type.txt index 83d716b2f..1ec15194c 100644 --- a/crates/ra_syntax/tests/data/parser/err/0013_invalid_type.txt +++ b/crates/ra_syntax/tests/data/parser/err/0013_invalid_type.txt | |||
@@ -7,10 +7,10 @@ SOURCE_FILE@[0; 86) | |||
7 | WHITESPACE@[10; 11) | 7 | WHITESPACE@[10; 11) |
8 | NAME@[11; 16) | 8 | NAME@[11; 16) |
9 | IDENT@[11; 16) "Cache" | 9 | IDENT@[11; 16) "Cache" |
10 | POS_FIELD_LIST@[16; 72) | 10 | POS_FIELD_DEF_LIST@[16; 72) |
11 | L_PAREN@[16; 17) | 11 | L_PAREN@[16; 17) |
12 | WHITESPACE@[17; 22) | 12 | WHITESPACE@[17; 22) |
13 | POS_FIELD@[22; 68) | 13 | POS_FIELD_DEF@[22; 68) |
14 | PATH_TYPE@[22; 68) | 14 | PATH_TYPE@[22; 68) |
15 | PATH@[22; 68) | 15 | PATH@[22; 68) |
16 | PATH_SEGMENT@[22; 68) | 16 | PATH_SEGMENT@[22; 68) |
@@ -55,7 +55,7 @@ SOURCE_FILE@[0; 86) | |||
55 | err: `expected R_ANGLE` | 55 | err: `expected R_ANGLE` |
56 | err: `expected COMMA` | 56 | err: `expected COMMA` |
57 | WHITESPACE@[68; 69) | 57 | WHITESPACE@[68; 69) |
58 | POS_FIELD@[69; 72) | 58 | POS_FIELD_DEF@[69; 72) |
59 | PATH_TYPE@[69; 72) | 59 | PATH_TYPE@[69; 72) |
60 | PATH@[69; 72) | 60 | PATH@[69; 72) |
61 | PATH_SEGMENT@[69; 72) | 61 | PATH_SEGMENT@[69; 72) |
diff --git a/crates/ra_syntax/tests/data/parser/err/0025_nope.txt b/crates/ra_syntax/tests/data/parser/err/0025_nope.txt index 615064f77..e0824ca56 100644 --- a/crates/ra_syntax/tests/data/parser/err/0025_nope.txt +++ b/crates/ra_syntax/tests/data/parser/err/0025_nope.txt | |||
@@ -28,9 +28,9 @@ SOURCE_FILE@[0; 575) | |||
28 | ENUM_VARIANT@[50; 62) | 28 | ENUM_VARIANT@[50; 62) |
29 | NAME@[50; 54) | 29 | NAME@[50; 54) |
30 | IDENT@[50; 54) "Var2" | 30 | IDENT@[50; 54) "Var2" |
31 | POS_FIELD_LIST@[54; 62) | 31 | POS_FIELD_DEF_LIST@[54; 62) |
32 | L_PAREN@[54; 55) | 32 | L_PAREN@[54; 55) |
33 | POS_FIELD@[55; 61) | 33 | POS_FIELD_DEF@[55; 61) |
34 | PATH_TYPE@[55; 61) | 34 | PATH_TYPE@[55; 61) |
35 | PATH@[55; 61) | 35 | PATH@[55; 61) |
36 | PATH_SEGMENT@[55; 61) | 36 | PATH_SEGMENT@[55; 61) |
@@ -151,9 +151,9 @@ SOURCE_FILE@[0; 575) | |||
151 | ENUM_VARIANT@[363; 372) | 151 | ENUM_VARIANT@[363; 372) |
152 | NAME@[363; 367) | 152 | NAME@[363; 367) |
153 | IDENT@[363; 367) "Nope" | 153 | IDENT@[363; 367) "Nope" |
154 | POS_FIELD_LIST@[367; 372) | 154 | POS_FIELD_DEF_LIST@[367; 372) |
155 | L_PAREN@[367; 368) | 155 | L_PAREN@[367; 368) |
156 | POS_FIELD@[368; 371) | 156 | POS_FIELD_DEF@[368; 371) |
157 | PATH_TYPE@[368; 371) | 157 | PATH_TYPE@[368; 371) |
158 | PATH@[368; 371) | 158 | PATH@[368; 371) |
159 | PATH_SEGMENT@[368; 371) | 159 | PATH_SEGMENT@[368; 371) |
diff --git a/crates/ra_syntax/tests/data/parser/inline/ok/0083_struct_items.txt b/crates/ra_syntax/tests/data/parser/inline/ok/0083_struct_items.txt index 058639375..6247729f6 100644 --- a/crates/ra_syntax/tests/data/parser/inline/ok/0083_struct_items.txt +++ b/crates/ra_syntax/tests/data/parser/inline/ok/0083_struct_items.txt | |||
@@ -21,7 +21,7 @@ SOURCE_FILE@[0; 106) | |||
21 | WHITESPACE@[32; 33) | 21 | WHITESPACE@[32; 33) |
22 | NAME@[33; 36) | 22 | NAME@[33; 36) |
23 | IDENT@[33; 36) "Foo" | 23 | IDENT@[33; 36) "Foo" |
24 | POS_FIELD_LIST@[36; 38) | 24 | POS_FIELD_DEF_LIST@[36; 38) |
25 | L_PAREN@[36; 37) | 25 | L_PAREN@[36; 37) |
26 | R_PAREN@[37; 38) | 26 | R_PAREN@[37; 38) |
27 | SEMI@[38; 39) | 27 | SEMI@[38; 39) |
@@ -31,9 +31,9 @@ SOURCE_FILE@[0; 106) | |||
31 | WHITESPACE@[46; 47) | 31 | WHITESPACE@[46; 47) |
32 | NAME@[47; 50) | 32 | NAME@[47; 50) |
33 | IDENT@[47; 50) "Foo" | 33 | IDENT@[47; 50) "Foo" |
34 | POS_FIELD_LIST@[50; 65) | 34 | POS_FIELD_DEF_LIST@[50; 65) |
35 | L_PAREN@[50; 51) | 35 | L_PAREN@[50; 51) |
36 | POS_FIELD@[51; 57) | 36 | POS_FIELD_DEF@[51; 57) |
37 | PATH_TYPE@[51; 57) | 37 | PATH_TYPE@[51; 57) |
38 | PATH@[51; 57) | 38 | PATH@[51; 57) |
39 | PATH_SEGMENT@[51; 57) | 39 | PATH_SEGMENT@[51; 57) |
@@ -41,7 +41,7 @@ SOURCE_FILE@[0; 106) | |||
41 | IDENT@[51; 57) "String" | 41 | IDENT@[51; 57) "String" |
42 | COMMA@[57; 58) | 42 | COMMA@[57; 58) |
43 | WHITESPACE@[58; 59) | 43 | WHITESPACE@[58; 59) |
44 | POS_FIELD@[59; 64) | 44 | POS_FIELD_DEF@[59; 64) |
45 | PATH_TYPE@[59; 64) | 45 | PATH_TYPE@[59; 64) |
46 | PATH@[59; 64) | 46 | PATH@[59; 64) |
47 | PATH_SEGMENT@[59; 64) | 47 | PATH_SEGMENT@[59; 64) |
diff --git a/crates/ra_syntax/tests/data/parser/inline/ok/0114_tuple_struct_where.txt b/crates/ra_syntax/tests/data/parser/inline/ok/0114_tuple_struct_where.txt index b7de83072..4f439f21c 100644 --- a/crates/ra_syntax/tests/data/parser/inline/ok/0114_tuple_struct_where.txt +++ b/crates/ra_syntax/tests/data/parser/inline/ok/0114_tuple_struct_where.txt | |||
@@ -10,9 +10,9 @@ SOURCE_FILE@[0; 53) | |||
10 | NAME@[12; 13) | 10 | NAME@[12; 13) |
11 | IDENT@[12; 13) "T" | 11 | IDENT@[12; 13) "T" |
12 | R_ANGLE@[13; 14) | 12 | R_ANGLE@[13; 14) |
13 | POS_FIELD_LIST@[14; 17) | 13 | POS_FIELD_DEF_LIST@[14; 17) |
14 | L_PAREN@[14; 15) | 14 | L_PAREN@[14; 15) |
15 | POS_FIELD@[15; 16) | 15 | POS_FIELD_DEF@[15; 16) |
16 | PATH_TYPE@[15; 16) | 16 | PATH_TYPE@[15; 16) |
17 | PATH@[15; 16) | 17 | PATH@[15; 16) |
18 | PATH_SEGMENT@[15; 16) | 18 | PATH_SEGMENT@[15; 16) |
@@ -49,9 +49,9 @@ SOURCE_FILE@[0; 53) | |||
49 | NAME@[46; 47) | 49 | NAME@[46; 47) |
50 | IDENT@[46; 47) "T" | 50 | IDENT@[46; 47) "T" |
51 | R_ANGLE@[47; 48) | 51 | R_ANGLE@[47; 48) |
52 | POS_FIELD_LIST@[48; 51) | 52 | POS_FIELD_DEF_LIST@[48; 51) |
53 | L_PAREN@[48; 49) | 53 | L_PAREN@[48; 49) |
54 | POS_FIELD@[49; 50) | 54 | POS_FIELD_DEF@[49; 50) |
55 | PATH_TYPE@[49; 50) | 55 | PATH_TYPE@[49; 50) |
56 | PATH@[49; 50) | 56 | PATH@[49; 50) |
57 | PATH_SEGMENT@[49; 50) | 57 | PATH_SEGMENT@[49; 50) |
diff --git a/crates/ra_syntax/tests/data/parser/inline/ok/0115_pos_field_attrs.txt b/crates/ra_syntax/tests/data/parser/inline/ok/0115_pos_field_attrs.txt index 99ec0755b..4da84ba48 100644 --- a/crates/ra_syntax/tests/data/parser/inline/ok/0115_pos_field_attrs.txt +++ b/crates/ra_syntax/tests/data/parser/inline/ok/0115_pos_field_attrs.txt | |||
@@ -5,10 +5,10 @@ SOURCE_FILE@[0; 60) | |||
5 | NAME@[7; 8) | 5 | NAME@[7; 8) |
6 | IDENT@[7; 8) "S" | 6 | IDENT@[7; 8) "S" |
7 | WHITESPACE@[8; 9) | 7 | WHITESPACE@[8; 9) |
8 | POS_FIELD_LIST@[9; 58) | 8 | POS_FIELD_DEF_LIST@[9; 58) |
9 | L_PAREN@[9; 10) | 9 | L_PAREN@[9; 10) |
10 | WHITESPACE@[10; 15) | 10 | WHITESPACE@[10; 15) |
11 | POS_FIELD@[15; 55) | 11 | POS_FIELD_DEF@[15; 55) |
12 | ATTR@[15; 43) | 12 | ATTR@[15; 43) |
13 | POUND@[15; 16) | 13 | POUND@[15; 16) |
14 | TOKEN_TREE@[16; 43) | 14 | TOKEN_TREE@[16; 43) |
diff --git a/crates/ra_syntax/tests/data/parser/ok/0016_struct_flavors.txt b/crates/ra_syntax/tests/data/parser/ok/0016_struct_flavors.txt index 1d355823d..d021d6de6 100644 --- a/crates/ra_syntax/tests/data/parser/ok/0016_struct_flavors.txt +++ b/crates/ra_syntax/tests/data/parser/ok/0016_struct_flavors.txt | |||
@@ -21,7 +21,7 @@ SOURCE_FILE@[0; 97) | |||
21 | WHITESPACE@[28; 29) | 21 | WHITESPACE@[28; 29) |
22 | NAME@[29; 30) | 22 | NAME@[29; 30) |
23 | IDENT@[29; 30) "C" | 23 | IDENT@[29; 30) "C" |
24 | POS_FIELD_LIST@[30; 32) | 24 | POS_FIELD_DEF_LIST@[30; 32) |
25 | L_PAREN@[30; 31) | 25 | L_PAREN@[30; 31) |
26 | R_PAREN@[31; 32) | 26 | R_PAREN@[31; 32) |
27 | SEMI@[32; 33) | 27 | SEMI@[32; 33) |
@@ -68,9 +68,9 @@ SOURCE_FILE@[0; 97) | |||
68 | WHITESPACE@[82; 83) | 68 | WHITESPACE@[82; 83) |
69 | NAME@[83; 84) | 69 | NAME@[83; 84) |
70 | IDENT@[83; 84) "E" | 70 | IDENT@[83; 84) "E" |
71 | POS_FIELD_LIST@[84; 95) | 71 | POS_FIELD_DEF_LIST@[84; 95) |
72 | L_PAREN@[84; 85) | 72 | L_PAREN@[84; 85) |
73 | POS_FIELD@[85; 90) | 73 | POS_FIELD_DEF@[85; 90) |
74 | VISIBILITY@[85; 88) | 74 | VISIBILITY@[85; 88) |
75 | PUB_KW@[85; 88) | 75 | PUB_KW@[85; 88) |
76 | WHITESPACE@[88; 89) | 76 | WHITESPACE@[88; 89) |
@@ -81,7 +81,7 @@ SOURCE_FILE@[0; 97) | |||
81 | IDENT@[89; 90) "x" | 81 | IDENT@[89; 90) "x" |
82 | COMMA@[90; 91) | 82 | COMMA@[90; 91) |
83 | WHITESPACE@[91; 92) | 83 | WHITESPACE@[91; 92) |
84 | POS_FIELD@[92; 93) | 84 | POS_FIELD_DEF@[92; 93) |
85 | PATH_TYPE@[92; 93) | 85 | PATH_TYPE@[92; 93) |
86 | PATH@[92; 93) | 86 | PATH@[92; 93) |
87 | PATH_SEGMENT@[92; 93) | 87 | PATH_SEGMENT@[92; 93) |
diff --git a/crates/ra_syntax/tests/data/parser/ok/0018_struct_type_params.txt b/crates/ra_syntax/tests/data/parser/ok/0018_struct_type_params.txt index 491d154df..05480c81b 100644 --- a/crates/ra_syntax/tests/data/parser/ok/0018_struct_type_params.txt +++ b/crates/ra_syntax/tests/data/parser/ok/0018_struct_type_params.txt | |||
@@ -23,9 +23,9 @@ SOURCE_FILE@[0; 290) | |||
23 | NAME@[24; 25) | 23 | NAME@[24; 25) |
24 | IDENT@[24; 25) "T" | 24 | IDENT@[24; 25) "T" |
25 | R_ANGLE@[25; 26) | 25 | R_ANGLE@[25; 26) |
26 | POS_FIELD_LIST@[26; 31) | 26 | POS_FIELD_DEF_LIST@[26; 31) |
27 | L_PAREN@[26; 27) | 27 | L_PAREN@[26; 27) |
28 | POS_FIELD@[27; 30) | 28 | POS_FIELD_DEF@[27; 30) |
29 | PATH_TYPE@[27; 30) | 29 | PATH_TYPE@[27; 30) |
30 | PATH@[27; 30) | 30 | PATH@[27; 30) |
31 | PATH_SEGMENT@[27; 30) | 31 | PATH_SEGMENT@[27; 30) |
diff --git a/crates/ra_syntax/tests/data/parser/ok/0019_enums.txt b/crates/ra_syntax/tests/data/parser/ok/0019_enums.txt index 6925219e8..2ac55cfbc 100644 --- a/crates/ra_syntax/tests/data/parser/ok/0019_enums.txt +++ b/crates/ra_syntax/tests/data/parser/ok/0019_enums.txt | |||
@@ -131,9 +131,9 @@ SOURCE_FILE@[0; 182) | |||
131 | ENUM_VARIANT@[162; 169) | 131 | ENUM_VARIANT@[162; 169) |
132 | NAME@[162; 163) | 132 | NAME@[162; 163) |
133 | IDENT@[162; 163) "D" | 133 | IDENT@[162; 163) "D" |
134 | POS_FIELD_LIST@[163; 169) | 134 | POS_FIELD_DEF_LIST@[163; 169) |
135 | L_PAREN@[163; 164) | 135 | L_PAREN@[163; 164) |
136 | POS_FIELD@[164; 167) | 136 | POS_FIELD_DEF@[164; 167) |
137 | PATH_TYPE@[164; 167) | 137 | PATH_TYPE@[164; 167) |
138 | PATH@[164; 167) | 138 | PATH@[164; 167) |
139 | PATH_SEGMENT@[164; 167) | 139 | PATH_SEGMENT@[164; 167) |
@@ -146,7 +146,7 @@ SOURCE_FILE@[0; 182) | |||
146 | ENUM_VARIANT@[175; 178) | 146 | ENUM_VARIANT@[175; 178) |
147 | NAME@[175; 176) | 147 | NAME@[175; 176) |
148 | IDENT@[175; 176) "E" | 148 | IDENT@[175; 176) "E" |
149 | POS_FIELD_LIST@[176; 178) | 149 | POS_FIELD_DEF_LIST@[176; 178) |
150 | L_PAREN@[176; 177) | 150 | L_PAREN@[176; 177) |
151 | R_PAREN@[177; 178) | 151 | R_PAREN@[177; 178) |
152 | COMMA@[178; 179) | 152 | COMMA@[178; 179) |
diff --git a/crates/ra_vfs/Cargo.toml b/crates/ra_vfs/Cargo.toml index e637063c9..383381d2a 100644 --- a/crates/ra_vfs/Cargo.toml +++ b/crates/ra_vfs/Cargo.toml | |||
@@ -10,9 +10,13 @@ relative-path = "0.4.0" | |||
10 | rustc-hash = "1.0" | 10 | rustc-hash = "1.0" |
11 | crossbeam-channel = "0.3.5" | 11 | crossbeam-channel = "0.3.5" |
12 | log = "0.4.6" | 12 | log = "0.4.6" |
13 | notify = "4.0.7" | ||
14 | drop_bomb = "0.1.0" | ||
15 | parking_lot = "0.7.0" | ||
13 | 16 | ||
14 | thread_worker = { path = "../thread_worker" } | 17 | thread_worker = { path = "../thread_worker" } |
15 | ra_arena = { path = "../ra_arena" } | 18 | ra_arena = { path = "../ra_arena" } |
16 | 19 | ||
17 | [dev-dependencies] | 20 | [dev-dependencies] |
18 | tempfile = "3" | 21 | tempfile = "3" |
22 | flexi_logger = "0.10.0" | ||
diff --git a/crates/ra_vfs/src/io.rs b/crates/ra_vfs/src/io.rs index 80328ad18..7ca1e9835 100644 --- a/crates/ra_vfs/src/io.rs +++ b/crates/ra_vfs/src/io.rs | |||
@@ -1,55 +1,109 @@ | |||
1 | use std::{ | 1 | use std::{fs, sync::Arc, thread}; |
2 | fmt, | ||
3 | fs, | ||
4 | path::{Path, PathBuf}, | ||
5 | }; | ||
6 | 2 | ||
7 | use walkdir::{DirEntry, WalkDir}; | 3 | use crossbeam_channel::{Receiver, Sender}; |
8 | use thread_worker::{WorkerHandle}; | ||
9 | use relative_path::RelativePathBuf; | 4 | use relative_path::RelativePathBuf; |
5 | use thread_worker::WorkerHandle; | ||
6 | use walkdir::WalkDir; | ||
10 | 7 | ||
11 | use crate::{VfsRoot, has_rs_extension}; | 8 | mod watcher; |
9 | use watcher::Watcher; | ||
12 | 10 | ||
13 | pub(crate) struct Task { | 11 | use crate::{RootFilter, Roots, VfsRoot}; |
14 | pub(crate) root: VfsRoot, | 12 | |
15 | pub(crate) path: PathBuf, | 13 | pub(crate) enum Task { |
16 | pub(crate) filter: Box<Fn(&DirEntry) -> bool + Send>, | 14 | AddRoot { |
15 | root: VfsRoot, | ||
16 | filter: Arc<RootFilter>, | ||
17 | }, | ||
17 | } | 18 | } |
18 | 19 | ||
19 | pub struct TaskResult { | 20 | #[derive(Debug)] |
20 | pub(crate) root: VfsRoot, | 21 | pub enum TaskResult { |
21 | pub(crate) files: Vec<(RelativePathBuf, String)>, | 22 | BulkLoadRoot { |
23 | root: VfsRoot, | ||
24 | files: Vec<(RelativePathBuf, String)>, | ||
25 | }, | ||
26 | AddSingleFile { | ||
27 | root: VfsRoot, | ||
28 | path: RelativePathBuf, | ||
29 | text: String, | ||
30 | }, | ||
31 | ChangeSingleFile { | ||
32 | root: VfsRoot, | ||
33 | path: RelativePathBuf, | ||
34 | text: String, | ||
35 | }, | ||
36 | RemoveSingleFile { | ||
37 | root: VfsRoot, | ||
38 | path: RelativePathBuf, | ||
39 | }, | ||
22 | } | 40 | } |
23 | 41 | ||
24 | impl fmt::Debug for TaskResult { | 42 | pub(crate) struct Worker { |
25 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | 43 | worker: thread_worker::Worker<Task, TaskResult>, |
26 | f.write_str("TaskResult { ... }") | 44 | worker_handle: WorkerHandle, |
27 | } | ||
28 | } | 45 | } |
29 | 46 | ||
30 | pub(crate) type Worker = thread_worker::Worker<Task, TaskResult>; | 47 | impl Worker { |
48 | pub(crate) fn start(roots: Arc<Roots>) -> Worker { | ||
49 | let (worker, worker_handle) = | ||
50 | thread_worker::spawn("vfs", 128, move |input_receiver, output_sender| { | ||
51 | let mut watcher = match Watcher::start(roots, output_sender.clone()) { | ||
52 | Ok(w) => Some(w), | ||
53 | Err(e) => { | ||
54 | log::error!("could not start watcher: {}", e); | ||
55 | None | ||
56 | } | ||
57 | }; | ||
58 | let res = input_receiver | ||
59 | .into_iter() | ||
60 | .filter_map(|t| handle_task(t, &mut watcher)) | ||
61 | .try_for_each(|it| output_sender.send(it)); | ||
62 | if let Some(watcher) = watcher { | ||
63 | let _ = watcher.shutdown(); | ||
64 | } | ||
65 | res.unwrap() | ||
66 | }); | ||
67 | Worker { | ||
68 | worker, | ||
69 | worker_handle, | ||
70 | } | ||
71 | } | ||
72 | |||
73 | pub(crate) fn sender(&self) -> &Sender<Task> { | ||
74 | &self.worker.inp | ||
75 | } | ||
76 | |||
77 | pub(crate) fn receiver(&self) -> &Receiver<TaskResult> { | ||
78 | &self.worker.out | ||
79 | } | ||
31 | 80 | ||
32 | pub(crate) fn start() -> (Worker, WorkerHandle) { | 81 | pub(crate) fn shutdown(self) -> thread::Result<()> { |
33 | thread_worker::spawn("vfs", 128, |input_receiver, output_sender| { | 82 | let _ = self.worker.shutdown(); |
34 | input_receiver | 83 | self.worker_handle.shutdown() |
35 | .into_iter() | 84 | } |
36 | .map(handle_task) | ||
37 | .try_for_each(|it| output_sender.send(it)) | ||
38 | .unwrap() | ||
39 | }) | ||
40 | } | 85 | } |
41 | 86 | ||
42 | fn handle_task(task: Task) -> TaskResult { | 87 | fn handle_task(task: Task, watcher: &mut Option<Watcher>) -> Option<TaskResult> { |
43 | let Task { root, path, filter } = task; | 88 | match task { |
44 | log::debug!("loading {} ...", path.as_path().display()); | 89 | Task::AddRoot { root, filter } => { |
45 | let files = load_root(path.as_path(), &*filter); | 90 | if let Some(watcher) = watcher { |
46 | log::debug!("... loaded {}", path.as_path().display()); | 91 | watcher.watch_root(&filter) |
47 | TaskResult { root, files } | 92 | } |
93 | log::debug!("loading {} ...", filter.root.as_path().display()); | ||
94 | let files = load_root(filter.as_ref()); | ||
95 | log::debug!("... loaded {}", filter.root.as_path().display()); | ||
96 | Some(TaskResult::BulkLoadRoot { root, files }) | ||
97 | } | ||
98 | } | ||
48 | } | 99 | } |
49 | 100 | ||
50 | fn load_root(root: &Path, filter: &dyn Fn(&DirEntry) -> bool) -> Vec<(RelativePathBuf, String)> { | 101 | fn load_root(filter: &RootFilter) -> Vec<(RelativePathBuf, String)> { |
51 | let mut res = Vec::new(); | 102 | let mut res = Vec::new(); |
52 | for entry in WalkDir::new(root).into_iter().filter_entry(filter) { | 103 | for entry in WalkDir::new(&filter.root) |
104 | .into_iter() | ||
105 | .filter_entry(filter.entry_filter()) | ||
106 | { | ||
53 | let entry = match entry { | 107 | let entry = match entry { |
54 | Ok(entry) => entry, | 108 | Ok(entry) => entry, |
55 | Err(e) => { | 109 | Err(e) => { |
@@ -61,9 +115,6 @@ fn load_root(root: &Path, filter: &dyn Fn(&DirEntry) -> bool) -> Vec<(RelativePa | |||
61 | continue; | 115 | continue; |
62 | } | 116 | } |
63 | let path = entry.path(); | 117 | let path = entry.path(); |
64 | if !has_rs_extension(path) { | ||
65 | continue; | ||
66 | } | ||
67 | let text = match fs::read_to_string(path) { | 118 | let text = match fs::read_to_string(path) { |
68 | Ok(text) => text, | 119 | Ok(text) => text, |
69 | Err(e) => { | 120 | Err(e) => { |
@@ -71,7 +122,7 @@ fn load_root(root: &Path, filter: &dyn Fn(&DirEntry) -> bool) -> Vec<(RelativePa | |||
71 | continue; | 122 | continue; |
72 | } | 123 | } |
73 | }; | 124 | }; |
74 | let path = RelativePathBuf::from_path(path.strip_prefix(root).unwrap()).unwrap(); | 125 | let path = RelativePathBuf::from_path(path.strip_prefix(&filter.root).unwrap()).unwrap(); |
75 | res.push((path.to_owned(), text)) | 126 | res.push((path.to_owned(), text)) |
76 | } | 127 | } |
77 | res | 128 | res |
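The rewritten io.rs is built around a "tasks in, results out over channels" worker. A minimal sketch of the same idea, using crossbeam-channel directly instead of the thread_worker helper; Task/TaskResult here are simplified stand-ins and spawn_worker is a hypothetical name:

    use std::thread;

    use crossbeam_channel::{bounded, Receiver, Sender};

    // Simplified stand-ins for io::Task and io::TaskResult.
    enum Task {
        AddRoot { root: u32 },
    }
    enum TaskResult {
        BulkLoadRoot { root: u32 },
    }

    // Spawn a worker thread that turns Tasks into TaskResults over bounded channels.
    fn spawn_worker() -> (Sender<Task>, Receiver<TaskResult>, thread::JoinHandle<()>) {
        let (task_tx, task_rx) = bounded::<Task>(128);
        let (res_tx, res_rx) = bounded::<TaskResult>(128);
        let handle = thread::spawn(move || {
            // The loop ends once every task sender is dropped, so shutdown is
            // "drop the sender, then join the thread handle".
            for task in task_rx {
                match task {
                    Task::AddRoot { root } => {
                        if res_tx.send(TaskResult::BulkLoadRoot { root }).is_err() {
                            break; // result receiver is gone, stop working
                        }
                    }
                }
            }
        });
        (task_tx, res_rx, handle)
    }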
diff --git a/crates/ra_vfs/src/io/watcher.rs b/crates/ra_vfs/src/io/watcher.rs new file mode 100644 index 000000000..ff6775f59 --- /dev/null +++ b/crates/ra_vfs/src/io/watcher.rs | |||
@@ -0,0 +1,200 @@ | |||
1 | use crate::{io, RootFilter, Roots, VfsRoot}; | ||
2 | use crossbeam_channel::Sender; | ||
3 | use drop_bomb::DropBomb; | ||
4 | use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher as NotifyWatcher}; | ||
5 | use parking_lot::Mutex; | ||
6 | use std::{ | ||
7 | fs, | ||
8 | path::{Path, PathBuf}, | ||
9 | sync::{mpsc, Arc}, | ||
10 | thread, | ||
11 | time::Duration, | ||
12 | }; | ||
13 | use walkdir::WalkDir; | ||
14 | |||
15 | #[derive(Debug)] | ||
16 | enum ChangeKind { | ||
17 | Create, | ||
18 | Write, | ||
19 | Remove, | ||
20 | } | ||
21 | |||
22 | const WATCHER_DELAY: Duration = Duration::from_millis(250); | ||
23 | |||
24 | pub(crate) struct Watcher { | ||
25 | thread: thread::JoinHandle<()>, | ||
26 | bomb: DropBomb, | ||
27 | watcher: Arc<Mutex<Option<RecommendedWatcher>>>, | ||
28 | } | ||
29 | |||
30 | impl Watcher { | ||
31 | pub(crate) fn start( | ||
32 | roots: Arc<Roots>, | ||
33 | output_sender: Sender<io::TaskResult>, | ||
34 | ) -> Result<Watcher, Box<std::error::Error>> { | ||
35 | let (input_sender, input_receiver) = mpsc::channel(); | ||
36 | let watcher = Arc::new(Mutex::new(Some(notify::watcher( | ||
37 | input_sender, | ||
38 | WATCHER_DELAY, | ||
39 | )?))); | ||
40 | let sender = output_sender.clone(); | ||
41 | let watcher_clone = watcher.clone(); | ||
42 | let thread = thread::spawn(move || { | ||
43 | let worker = WatcherWorker { | ||
44 | roots, | ||
45 | watcher: watcher_clone, | ||
46 | sender, | ||
47 | }; | ||
48 | input_receiver | ||
49 | .into_iter() | ||
50 | // forward relevant events only | ||
51 | .try_for_each(|change| worker.handle_debounced_event(change)) | ||
52 | .unwrap() | ||
53 | }); | ||
54 | Ok(Watcher { | ||
55 | thread, | ||
56 | watcher, | ||
57 | bomb: DropBomb::new(format!("Watcher was not shutdown")), | ||
58 | }) | ||
59 | } | ||
60 | |||
61 | pub fn watch_root(&mut self, filter: &RootFilter) { | ||
62 | for res in WalkDir::new(&filter.root) | ||
63 | .into_iter() | ||
64 | .filter_entry(filter.entry_filter()) | ||
65 | { | ||
66 | match res { | ||
67 | Ok(entry) => { | ||
68 | if entry.file_type().is_dir() { | ||
69 | watch_one(self.watcher.as_ref(), entry.path()); | ||
70 | } | ||
71 | } | ||
72 | Err(e) => log::warn!("watcher error: {}", e), | ||
73 | } | ||
74 | } | ||
75 | } | ||
76 | |||
77 | pub fn shutdown(mut self) -> thread::Result<()> { | ||
78 | self.bomb.defuse(); | ||
79 | drop(self.watcher.lock().take()); | ||
80 | let res = self.thread.join(); | ||
81 | match &res { | ||
82 | Ok(()) => log::info!("... Watcher terminated with ok"), | ||
83 | Err(_) => log::error!("... Watcher terminated with err"), | ||
84 | } | ||
85 | res | ||
86 | } | ||
87 | } | ||
88 | |||
89 | struct WatcherWorker { | ||
90 | watcher: Arc<Mutex<Option<RecommendedWatcher>>>, | ||
91 | roots: Arc<Roots>, | ||
92 | sender: Sender<io::TaskResult>, | ||
93 | } | ||
94 | |||
95 | impl WatcherWorker { | ||
96 | fn handle_debounced_event(&self, ev: DebouncedEvent) -> Result<(), Box<std::error::Error>> { | ||
97 | match ev { | ||
98 | DebouncedEvent::NoticeWrite(_) | ||
99 | | DebouncedEvent::NoticeRemove(_) | ||
100 | | DebouncedEvent::Chmod(_) => { | ||
101 | // ignore | ||
102 | } | ||
103 | DebouncedEvent::Rescan => { | ||
104 | // TODO rescan all roots | ||
105 | } | ||
106 | DebouncedEvent::Create(path) => { | ||
107 | self.handle_change(path, ChangeKind::Create); | ||
108 | } | ||
109 | DebouncedEvent::Write(path) => { | ||
110 | self.handle_change(path, ChangeKind::Write); | ||
111 | } | ||
112 | DebouncedEvent::Remove(path) => { | ||
113 | self.handle_change(path, ChangeKind::Remove); | ||
114 | } | ||
115 | DebouncedEvent::Rename(src, dst) => { | ||
116 | self.handle_change(src, ChangeKind::Remove); | ||
117 | self.handle_change(dst, ChangeKind::Create); | ||
118 | } | ||
119 | DebouncedEvent::Error(err, path) => { | ||
120 | // TODO should we reload the file contents? | ||
121 | log::warn!("watcher error \"{}\", {:?}", err, path); | ||
122 | } | ||
123 | } | ||
124 | Ok(()) | ||
125 | } | ||
126 | |||
127 | fn handle_change(&self, path: PathBuf, kind: ChangeKind) { | ||
128 | if let Err(e) = self.try_handle_change(path, kind) { | ||
129 | log::warn!("watcher error: {}", e) | ||
130 | } | ||
131 | } | ||
132 | |||
133 | fn try_handle_change( | ||
134 | &self, | ||
135 | path: PathBuf, | ||
136 | kind: ChangeKind, | ||
137 | ) -> Result<(), Box<std::error::Error>> { | ||
138 | let (root, rel_path) = match self.roots.find(&path) { | ||
139 | Some(x) => x, | ||
140 | None => return Ok(()), | ||
141 | }; | ||
142 | match kind { | ||
143 | ChangeKind::Create => { | ||
144 | if path.is_dir() { | ||
145 | self.watch_recursive(&path, root); | ||
146 | } else { | ||
147 | let text = fs::read_to_string(&path)?; | ||
148 | self.sender.send(io::TaskResult::AddSingleFile { | ||
149 | root, | ||
150 | path: rel_path, | ||
151 | text, | ||
152 | })? | ||
153 | } | ||
154 | } | ||
155 | ChangeKind::Write => { | ||
156 | let text = fs::read_to_string(&path)?; | ||
157 | self.sender.send(io::TaskResult::ChangeSingleFile { | ||
158 | root, | ||
159 | path: rel_path, | ||
160 | text, | ||
161 | })? | ||
162 | } | ||
163 | ChangeKind::Remove => self.sender.send(io::TaskResult::RemoveSingleFile { | ||
164 | root, | ||
165 | path: rel_path, | ||
166 | })?, | ||
167 | } | ||
168 | Ok(()) | ||
169 | } | ||
170 | |||
171 | fn watch_recursive(&self, dir: &Path, root: VfsRoot) { | ||
172 | let filter = &self.roots[root]; | ||
173 | for res in WalkDir::new(dir) | ||
174 | .into_iter() | ||
175 | .filter_entry(filter.entry_filter()) | ||
176 | { | ||
177 | match res { | ||
178 | Ok(entry) => { | ||
179 | if entry.file_type().is_dir() { | ||
180 | watch_one(self.watcher.as_ref(), entry.path()); | ||
181 | } else { | ||
182 | // emit only for files; otherwise we will cause watch_recursive to be called again with a dir that we are already watching | ||
183 | // emit as create because we haven't seen it yet | ||
184 | self.handle_change(entry.path().to_path_buf(), ChangeKind::Create); | ||
185 | } | ||
186 | } | ||
187 | Err(e) => log::warn!("watcher error: {}", e), | ||
188 | } | ||
189 | } | ||
190 | } | ||
191 | } | ||
192 | |||
193 | fn watch_one(watcher: &Mutex<Option<RecommendedWatcher>>, dir: &Path) { | ||
194 | if let Some(watcher) = watcher.lock().as_mut() { | ||
195 | match watcher.watch(dir, RecursiveMode::NonRecursive) { | ||
196 | Ok(()) => log::debug!("watching \"{}\"", dir.display()), | ||
197 | Err(e) => log::warn!("could not watch \"{}\": {}", dir.display(), e), | ||
198 | } | ||
199 | } | ||
200 | } | ||
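The watcher module above builds on the notify 4.x debounced API. Stripped of the VFS plumbing, the core pattern looks roughly like this; watch_dir is a hypothetical helper and only the event variants handled above are matched:

    use std::{path::Path, sync::mpsc, time::Duration};

    use notify::{DebouncedEvent, RecursiveMode, Watcher};

    // Watch a single directory (non-recursively) and print debounced events
    // until the channel disconnects.
    fn watch_dir(dir: &Path) -> notify::Result<()> {
        let (tx, rx) = mpsc::channel();
        // 250 ms matches WATCHER_DELAY above; events inside that window are coalesced.
        let mut watcher = notify::watcher(tx, Duration::from_millis(250))?;
        watcher.watch(dir, RecursiveMode::NonRecursive)?;
        for event in rx {
            match event {
                DebouncedEvent::Create(p) => println!("created {}", p.display()),
                DebouncedEvent::Write(p) => println!("written {}", p.display()),
                DebouncedEvent::Remove(p) => println!("removed {}", p.display()),
                DebouncedEvent::Rename(from, to) => {
                    println!("renamed {} -> {}", from.display(), to.display())
                }
                _ => {} // NoticeWrite/NoticeRemove/Chmod/Rescan/Error are ignored here
            }
        }
        Ok(())
    }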
diff --git a/crates/ra_vfs/src/lib.rs b/crates/ra_vfs/src/lib.rs index cdea18d73..d1b0222e7 100644 --- a/crates/ra_vfs/src/lib.rs +++ b/crates/ra_vfs/src/lib.rs | |||
@@ -16,52 +16,77 @@ | |||
16 | mod io; | 16 | mod io; |
17 | 17 | ||
18 | use std::{ | 18 | use std::{ |
19 | fmt, | ||
20 | mem, | ||
21 | thread, | ||
22 | cmp::Reverse, | 19 | cmp::Reverse, |
20 | fmt, fs, mem, | ||
21 | ops::{Deref, DerefMut}, | ||
23 | path::{Path, PathBuf}, | 22 | path::{Path, PathBuf}, |
24 | ffi::OsStr, | ||
25 | sync::Arc, | 23 | sync::Arc, |
26 | fs, | 24 | thread, |
27 | }; | 25 | }; |
28 | 26 | ||
29 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
30 | use relative_path::RelativePathBuf; | ||
31 | use crossbeam_channel::Receiver; | 27 | use crossbeam_channel::Receiver; |
28 | use ra_arena::{impl_arena_id, Arena, RawId}; | ||
29 | use relative_path::{Component, RelativePath, RelativePathBuf}; | ||
30 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
32 | use walkdir::DirEntry; | 31 | use walkdir::DirEntry; |
33 | use thread_worker::WorkerHandle; | ||
34 | use ra_arena::{Arena, RawId, impl_arena_id}; | ||
35 | 32 | ||
36 | pub use crate::io::TaskResult as VfsTask; | 33 | pub use crate::io::TaskResult as VfsTask; |
34 | use io::{TaskResult, Worker}; | ||
37 | 35 | ||
38 | /// `RootFilter` is a predicate that checks if a file can belong to a root. If | 36 | /// `RootFilter` is a predicate that checks if a file can belong to a root. If |
39 | /// several filters match a file (nested dirs), the most nested one wins. | 37 | /// several filters match a file (nested dirs), the most nested one wins. |
40 | struct RootFilter { | 38 | pub(crate) struct RootFilter { |
41 | root: PathBuf, | 39 | root: PathBuf, |
42 | file_filter: fn(&Path) -> bool, | 40 | filter: fn(&Path, &RelativePath) -> bool, |
41 | excluded_dirs: Vec<PathBuf>, | ||
43 | } | 42 | } |
44 | 43 | ||
45 | impl RootFilter { | 44 | impl RootFilter { |
46 | fn new(root: PathBuf) -> RootFilter { | 45 | fn new(root: PathBuf, excluded_dirs: Vec<PathBuf>) -> RootFilter { |
47 | RootFilter { | 46 | RootFilter { |
48 | root, | 47 | root, |
49 | file_filter: has_rs_extension, | 48 | filter: default_filter, |
49 | excluded_dirs, | ||
50 | } | 50 | } |
51 | } | 51 | } |
52 | /// Check if this root can contain `path`. NB: even if this returns | 52 | /// Check if this root can contain `path`. NB: even if this returns |
53 | /// true, the `path` might actually be conained in some nested root. | 53 | /// true, the `path` might actually be conained in some nested root. |
54 | fn can_contain(&self, path: &Path) -> Option<RelativePathBuf> { | 54 | pub(crate) fn can_contain(&self, path: &Path) -> Option<RelativePathBuf> { |
55 | if !(self.file_filter)(path) { | 55 | let rel_path = path.strip_prefix(&self.root).ok()?; |
56 | let rel_path = RelativePathBuf::from_path(rel_path).ok()?; | ||
57 | if !(self.filter)(path, rel_path.as_relative_path()) { | ||
56 | return None; | 58 | return None; |
57 | } | 59 | } |
58 | let path = path.strip_prefix(&self.root).ok()?; | 60 | Some(rel_path) |
59 | RelativePathBuf::from_path(path).ok() | 61 | } |
62 | |||
63 | pub(crate) fn entry_filter<'a>(&'a self) -> impl FnMut(&DirEntry) -> bool + 'a { | ||
64 | move |entry: &DirEntry| { | ||
65 | if entry.file_type().is_dir() && self.excluded_dirs.iter().any(|it| it == entry.path()) | ||
66 | { | ||
67 | // do not walk nested roots | ||
68 | false | ||
69 | } else { | ||
70 | self.can_contain(entry.path()).is_some() | ||
71 | } | ||
72 | } | ||
60 | } | 73 | } |
61 | } | 74 | } |
62 | 75 | ||
63 | fn has_rs_extension(p: &Path) -> bool { | 76 | pub(crate) fn default_filter(path: &Path, rel_path: &RelativePath) -> bool { |
64 | p.extension() == Some(OsStr::new("rs")) | 77 | if path.is_dir() { |
78 | for (i, c) in rel_path.components().enumerate() { | ||
79 | if let Component::Normal(c) = c { | ||
80 | // TODO hardcoded for now | ||
81 | if (i == 0 && c == "target") || c == ".git" || c == "node_modules" { | ||
82 | return false; | ||
83 | } | ||
84 | } | ||
85 | } | ||
86 | true | ||
87 | } else { | ||
88 | rel_path.extension() == Some("rs") | ||
89 | } | ||
65 | } | 90 | } |
66 | 91 | ||
67 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] | 92 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] |
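To make the rules in default_filter concrete, here is a small re-statement that takes is_dir as a parameter instead of asking the filesystem; looks_interesting is a hypothetical name used only for this sketch:

    use relative_path::{Component, RelativePath};

    // Same rules as default_filter: skip a top-level `target` dir, any `.git`
    // or `node_modules` dir, and any file that is not a `.rs` file.
    fn looks_interesting(rel_path: &RelativePath, is_dir: bool) -> bool {
        if is_dir {
            for (i, c) in rel_path.components().enumerate() {
                if let Component::Normal(c) = c {
                    if (i == 0 && c == "target") || c == ".git" || c == "node_modules" {
                        return false;
                    }
                }
            }
            true
        } else {
            rel_path.extension() == Some("rs")
        }
    }

    fn main() {
        assert!(looks_interesting(RelativePath::new("src/lib.rs"), false));
        assert!(!looks_interesting(RelativePath::new("src/data.json"), false));
        assert!(!looks_interesting(RelativePath::new("target"), true));
        assert!(looks_interesting(RelativePath::new("src/target"), true)); // not at the root
        assert!(!looks_interesting(RelativePath::new("vendor/node_modules"), true));
    }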
@@ -75,16 +100,58 @@ impl_arena_id!(VfsFile); | |||
75 | struct VfsFileData { | 100 | struct VfsFileData { |
76 | root: VfsRoot, | 101 | root: VfsRoot, |
77 | path: RelativePathBuf, | 102 | path: RelativePathBuf, |
103 | is_overlayed: bool, | ||
78 | text: Arc<String>, | 104 | text: Arc<String>, |
79 | } | 105 | } |
80 | 106 | ||
107 | pub(crate) struct Roots { | ||
108 | roots: Arena<VfsRoot, Arc<RootFilter>>, | ||
109 | } | ||
110 | |||
111 | impl Roots { | ||
112 | pub(crate) fn new(mut paths: Vec<PathBuf>) -> Roots { | ||
113 | let mut roots = Arena::default(); | ||
114 | // A hack to make nesting work. | ||
115 | paths.sort_by_key(|it| Reverse(it.as_os_str().len())); | ||
116 | for (i, path) in paths.iter().enumerate() { | ||
117 | let nested_roots = paths[..i] | ||
118 | .iter() | ||
119 | .filter(|it| it.starts_with(path)) | ||
120 | .map(|it| it.clone()) | ||
121 | .collect::<Vec<_>>(); | ||
122 | |||
123 | let root_filter = Arc::new(RootFilter::new(path.clone(), nested_roots)); | ||
124 | |||
125 | roots.alloc(root_filter.clone()); | ||
126 | } | ||
127 | Roots { roots } | ||
128 | } | ||
129 | pub(crate) fn find(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf)> { | ||
130 | self.roots | ||
131 | .iter() | ||
132 | .find_map(|(root, data)| data.can_contain(path).map(|it| (root, it))) | ||
133 | } | ||
134 | } | ||
135 | |||
136 | impl Deref for Roots { | ||
137 | type Target = Arena<VfsRoot, Arc<RootFilter>>; | ||
138 | fn deref(&self) -> &Self::Target { | ||
139 | &self.roots | ||
140 | } | ||
141 | } | ||
142 | |||
143 | impl DerefMut for Roots { | ||
144 | fn deref_mut(&mut self) -> &mut Self::Target { | ||
145 | &mut self.roots | ||
146 | } | ||
147 | } | ||
148 | |||
81 | pub struct Vfs { | 149 | pub struct Vfs { |
82 | roots: Arena<VfsRoot, RootFilter>, | 150 | roots: Arc<Roots>, |
83 | files: Arena<VfsFile, VfsFileData>, | 151 | files: Arena<VfsFile, VfsFileData>, |
84 | root2files: FxHashMap<VfsRoot, FxHashSet<VfsFile>>, | 152 | root2files: FxHashMap<VfsRoot, FxHashSet<VfsFile>>, |
85 | pending_changes: Vec<VfsChange>, | 153 | pending_changes: Vec<VfsChange>, |
86 | worker: io::Worker, | 154 | worker: Worker, |
87 | worker_handle: WorkerHandle, | ||
88 | } | 155 | } |
89 | 156 | ||
90 | impl fmt::Debug for Vfs { | 157 | impl fmt::Debug for Vfs { |
@@ -94,44 +161,30 @@ impl fmt::Debug for Vfs { | |||
94 | } | 161 | } |
95 | 162 | ||
96 | impl Vfs { | 163 | impl Vfs { |
97 | pub fn new(mut roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) { | 164 | pub fn new(roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) { |
98 | let (worker, worker_handle) = io::start(); | 165 | let roots = Arc::new(Roots::new(roots)); |
99 | 166 | let worker = io::Worker::start(roots.clone()); | |
100 | let mut res = Vfs { | 167 | let mut root2files = FxHashMap::default(); |
101 | roots: Arena::default(), | 168 | |
169 | for (root, filter) in roots.iter() { | ||
170 | root2files.insert(root, Default::default()); | ||
171 | worker | ||
172 | .sender() | ||
173 | .send(io::Task::AddRoot { | ||
174 | root, | ||
175 | filter: filter.clone(), | ||
176 | }) | ||
177 | .unwrap(); | ||
178 | } | ||
179 | let res = Vfs { | ||
180 | roots, | ||
102 | files: Arena::default(), | 181 | files: Arena::default(), |
103 | root2files: FxHashMap::default(), | 182 | root2files, |
104 | worker, | 183 | worker, |
105 | worker_handle, | ||
106 | pending_changes: Vec::new(), | 184 | pending_changes: Vec::new(), |
107 | }; | 185 | }; |
108 | 186 | let vfs_roots = res.roots.iter().map(|(id, _)| id).collect(); | |
109 | // A hack to make nesting work. | 187 | (res, vfs_roots) |
110 | roots.sort_by_key(|it| Reverse(it.as_os_str().len())); | ||
111 | for (i, path) in roots.iter().enumerate() { | ||
112 | let root = res.roots.alloc(RootFilter::new(path.clone())); | ||
113 | res.root2files.insert(root, Default::default()); | ||
114 | let nested = roots[..i] | ||
115 | .iter() | ||
116 | .filter(|it| it.starts_with(path)) | ||
117 | .map(|it| it.clone()) | ||
118 | .collect::<Vec<_>>(); | ||
119 | let filter = move |entry: &DirEntry| { | ||
120 | if entry.file_type().is_file() { | ||
121 | has_rs_extension(entry.path()) | ||
122 | } else { | ||
123 | nested.iter().all(|it| it != entry.path()) | ||
124 | } | ||
125 | }; | ||
126 | let task = io::Task { | ||
127 | root, | ||
128 | path: path.clone(), | ||
129 | filter: Box::new(filter), | ||
130 | }; | ||
131 | res.worker.inp.send(task).unwrap(); | ||
132 | } | ||
133 | let roots = res.roots.iter().map(|(id, _)| id).collect(); | ||
134 | (res, roots) | ||
135 | } | 188 | } |
136 | 189 | ||
137 | pub fn root2path(&self, root: VfsRoot) -> PathBuf { | 190 | pub fn root2path(&self, root: VfsRoot) -> PathBuf { |
@@ -165,7 +218,7 @@ impl Vfs { | |||
165 | } else { | 218 | } else { |
166 | let text = fs::read_to_string(path).unwrap_or_default(); | 219 | let text = fs::read_to_string(path).unwrap_or_default(); |
167 | let text = Arc::new(text); | 220 | let text = Arc::new(text); |
168 | let file = self.add_file(root, rel_path.clone(), Arc::clone(&text)); | 221 | let file = self.add_file(root, rel_path.clone(), Arc::clone(&text), false); |
169 | let change = VfsChange::AddFile { | 222 | let change = VfsChange::AddFile { |
170 | file, | 223 | file, |
171 | text, | 224 | text, |
@@ -180,85 +233,130 @@ impl Vfs { | |||
180 | } | 233 | } |
181 | 234 | ||
182 | pub fn task_receiver(&self) -> &Receiver<io::TaskResult> { | 235 | pub fn task_receiver(&self) -> &Receiver<io::TaskResult> { |
183 | &self.worker.out | 236 | self.worker.receiver() |
184 | } | 237 | } |
185 | 238 | ||
186 | pub fn handle_task(&mut self, task: io::TaskResult) { | 239 | pub fn handle_task(&mut self, task: io::TaskResult) { |
187 | let mut files = Vec::new(); | 240 | match task { |
188 | // While we were scanning the root in the backgound, a file might have | 241 | TaskResult::BulkLoadRoot { root, files } => { |
189 | // been open in the editor, so we need to account for that. | 242 | let mut cur_files = Vec::new(); |
190 | let exising = self.root2files[&task.root] | 243 | // While we were scanning the root in the backgound, a file might have |
191 | .iter() | 244 | // been open in the editor, so we need to account for that. |
192 | .map(|&file| (self.files[file].path.clone(), file)) | 245 | let exising = self.root2files[&root] |
193 | .collect::<FxHashMap<_, _>>(); | 246 | .iter() |
194 | for (path, text) in task.files { | 247 | .map(|&file| (self.files[file].path.clone(), file)) |
195 | if let Some(&file) = exising.get(&path) { | 248 | .collect::<FxHashMap<_, _>>(); |
196 | let text = Arc::clone(&self.files[file].text); | 249 | for (path, text) in files { |
197 | files.push((file, path, text)); | 250 | if let Some(&file) = exising.get(&path) { |
198 | continue; | 251 | let text = Arc::clone(&self.files[file].text); |
252 | cur_files.push((file, path, text)); | ||
253 | continue; | ||
254 | } | ||
255 | let text = Arc::new(text); | ||
256 | let file = self.add_file(root, path.clone(), Arc::clone(&text), false); | ||
257 | cur_files.push((file, path, text)); | ||
258 | } | ||
259 | |||
260 | let change = VfsChange::AddRoot { | ||
261 | root, | ||
262 | files: cur_files, | ||
263 | }; | ||
264 | self.pending_changes.push(change); | ||
265 | } | ||
266 | TaskResult::AddSingleFile { root, path, text } => { | ||
267 | self.do_add_file(root, path, text, false); | ||
268 | } | ||
269 | TaskResult::ChangeSingleFile { root, path, text } => { | ||
270 | if let Some(file) = self.find_file(root, &path) { | ||
271 | self.do_change_file(file, text, false); | ||
272 | } else { | ||
273 | self.do_add_file(root, path, text, false); | ||
274 | } | ||
275 | } | ||
276 | TaskResult::RemoveSingleFile { root, path } => { | ||
277 | if let Some(file) = self.find_file(root, &path) { | ||
278 | self.do_remove_file(root, path, file, false); | ||
279 | } | ||
199 | } | 280 | } |
200 | let text = Arc::new(text); | ||
201 | let file = self.add_file(task.root, path.clone(), Arc::clone(&text)); | ||
202 | files.push((file, path, text)); | ||
203 | } | 281 | } |
282 | } | ||
204 | 283 | ||
205 | let change = VfsChange::AddRoot { | 284 | fn do_add_file( |
206 | root: task.root, | 285 | &mut self, |
207 | files, | 286 | root: VfsRoot, |
208 | }; | 287 | path: RelativePathBuf, |
209 | self.pending_changes.push(change); | 288 | text: String, |
289 | is_overlay: bool, | ||
290 | ) -> Option<VfsFile> { | ||
291 | let text = Arc::new(text); | ||
292 | let file = self.add_file(root, path.clone(), text.clone(), is_overlay); | ||
293 | self.pending_changes.push(VfsChange::AddFile { | ||
294 | file, | ||
295 | root, | ||
296 | path, | ||
297 | text, | ||
298 | }); | ||
299 | Some(file) | ||
300 | } | ||
301 | |||
302 | fn do_change_file(&mut self, file: VfsFile, text: String, is_overlay: bool) { | ||
303 | if !is_overlay && self.files[file].is_overlayed { | ||
304 | return; | ||
305 | } | ||
306 | let text = Arc::new(text); | ||
307 | self.change_file(file, text.clone(), is_overlay); | ||
308 | self.pending_changes | ||
309 | .push(VfsChange::ChangeFile { file, text }); | ||
310 | } | ||
311 | |||
312 | fn do_remove_file( | ||
313 | &mut self, | ||
314 | root: VfsRoot, | ||
315 | path: RelativePathBuf, | ||
316 | file: VfsFile, | ||
317 | is_overlay: bool, | ||
318 | ) { | ||
319 | if !is_overlay && self.files[file].is_overlayed { | ||
320 | return; | ||
321 | } | ||
322 | self.remove_file(file); | ||
323 | self.pending_changes | ||
324 | .push(VfsChange::RemoveFile { root, path, file }); | ||
210 | } | 325 | } |
211 | 326 | ||
212 | pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> { | 327 | pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> { |
213 | let mut res = None; | 328 | if let Some((root, rel_path, file)) = self.find_root(path) { |
214 | if let Some((root, path, file)) = self.find_root(path) { | 329 | if let Some(file) = file { |
215 | let text = Arc::new(text); | 330 | self.do_change_file(file, text, true); |
216 | let change = if let Some(file) = file { | 331 | Some(file) |
217 | res = Some(file); | ||
218 | self.change_file(file, Arc::clone(&text)); | ||
219 | VfsChange::ChangeFile { file, text } | ||
220 | } else { | 332 | } else { |
221 | let file = self.add_file(root, path.clone(), Arc::clone(&text)); | 333 | self.do_add_file(root, rel_path, text, true) |
222 | res = Some(file); | 334 | } |
223 | VfsChange::AddFile { | 335 | } else { |
224 | file, | 336 | None |
225 | text, | ||
226 | root, | ||
227 | path, | ||
228 | } | ||
229 | }; | ||
230 | self.pending_changes.push(change); | ||
231 | } | 337 | } |
232 | res | ||
233 | } | 338 | } |
234 | 339 | ||
235 | pub fn change_file_overlay(&mut self, path: &Path, new_text: String) { | 340 | pub fn change_file_overlay(&mut self, path: &Path, new_text: String) { |
236 | if let Some((_root, _path, file)) = self.find_root(path) { | 341 | if let Some((_root, _path, file)) = self.find_root(path) { |
237 | let file = file.expect("can't change a file which wasn't added"); | 342 | let file = file.expect("can't change a file which wasn't added"); |
238 | let text = Arc::new(new_text); | 343 | self.do_change_file(file, new_text, true); |
239 | self.change_file(file, Arc::clone(&text)); | ||
240 | let change = VfsChange::ChangeFile { file, text }; | ||
241 | self.pending_changes.push(change); | ||
242 | } | 344 | } |
243 | } | 345 | } |
244 | 346 | ||
245 | pub fn remove_file_overlay(&mut self, path: &Path) -> Option<VfsFile> { | 347 | pub fn remove_file_overlay(&mut self, path: &Path) -> Option<VfsFile> { |
246 | let mut res = None; | ||
247 | if let Some((root, path, file)) = self.find_root(path) { | 348 | if let Some((root, path, file)) = self.find_root(path) { |
248 | let file = file.expect("can't remove a file which wasn't added"); | 349 | let file = file.expect("can't remove a file which wasn't added"); |
249 | res = Some(file); | ||
250 | let full_path = path.to_path(&self.roots[root].root); | 350 | let full_path = path.to_path(&self.roots[root].root); |
251 | let change = if let Ok(text) = fs::read_to_string(&full_path) { | 351 | if let Ok(text) = fs::read_to_string(&full_path) { |
252 | let text = Arc::new(text); | 352 | self.do_change_file(file, text, true); |
253 | self.change_file(file, Arc::clone(&text)); | ||
254 | VfsChange::ChangeFile { file, text } | ||
255 | } else { | 353 | } else { |
256 | self.remove_file(file); | 354 | self.do_remove_file(root, path, file, true); |
257 | VfsChange::RemoveFile { root, file, path } | 355 | } |
258 | }; | 356 | Some(file) |
259 | self.pending_changes.push(change); | 357 | } else { |
358 | None | ||
260 | } | 359 | } |
261 | res | ||
262 | } | 360 | } |
263 | 361 | ||
264 | pub fn commit_changes(&mut self) -> Vec<VfsChange> { | 362 | pub fn commit_changes(&mut self) -> Vec<VfsChange> { |
@@ -267,19 +365,31 @@ impl Vfs { | |||
267 | 365 | ||
268 | /// Shut down the VFS and terminate the background watching thread. | 366 | /// Shut down the VFS and terminate the background watching thread. |
269 | pub fn shutdown(self) -> thread::Result<()> { | 367 | pub fn shutdown(self) -> thread::Result<()> { |
270 | let _ = self.worker.shutdown(); | 368 | self.worker.shutdown() |
271 | self.worker_handle.shutdown() | ||
272 | } | 369 | } |
273 | 370 | ||
274 | fn add_file(&mut self, root: VfsRoot, path: RelativePathBuf, text: Arc<String>) -> VfsFile { | 371 | fn add_file( |
275 | let data = VfsFileData { root, path, text }; | 372 | &mut self, |
373 | root: VfsRoot, | ||
374 | path: RelativePathBuf, | ||
375 | text: Arc<String>, | ||
376 | is_overlayed: bool, | ||
377 | ) -> VfsFile { | ||
378 | let data = VfsFileData { | ||
379 | root, | ||
380 | path, | ||
381 | text, | ||
382 | is_overlayed, | ||
383 | }; | ||
276 | let file = self.files.alloc(data); | 384 | let file = self.files.alloc(data); |
277 | self.root2files.get_mut(&root).unwrap().insert(file); | 385 | self.root2files.get_mut(&root).unwrap().insert(file); |
278 | file | 386 | file |
279 | } | 387 | } |
280 | 388 | ||
281 | fn change_file(&mut self, file: VfsFile, new_text: Arc<String>) { | 389 | fn change_file(&mut self, file: VfsFile, new_text: Arc<String>, is_overlayed: bool) { |
282 | self.files[file].text = new_text; | 390 | let mut file_data = &mut self.files[file]; |
391 | file_data.text = new_text; | ||
392 | file_data.is_overlayed = is_overlayed; | ||
283 | } | 393 | } |
284 | 394 | ||
285 | fn remove_file(&mut self, file: VfsFile) { | 395 | fn remove_file(&mut self, file: VfsFile) { |
@@ -292,15 +402,16 @@ impl Vfs { | |||
292 | } | 402 | } |
293 | 403 | ||
294 | fn find_root(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf, Option<VfsFile>)> { | 404 | fn find_root(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf, Option<VfsFile>)> { |
295 | let (root, path) = self | 405 | let (root, path) = self.roots.find(&path)?; |
296 | .roots | 406 | let file = self.find_file(root, &path); |
297 | .iter() | 407 | Some((root, path, file)) |
298 | .find_map(|(root, data)| data.can_contain(path).map(|it| (root, it)))?; | 408 | } |
299 | let file = self.root2files[&root] | 409 | |
410 | fn find_file(&self, root: VfsRoot, path: &RelativePath) -> Option<VfsFile> { | ||
411 | self.root2files[&root] | ||
300 | .iter() | 412 | .iter() |
301 | .map(|&it| it) | 413 | .map(|&it| it) |
302 | .find(|&file| self.files[file].path == path); | 414 | .find(|&file| self.files[file].path == path) |
303 | Some((root, path, file)) | ||
304 | } | 415 | } |
305 | } | 416 | } |
306 | 417 | ||
diff --git a/crates/ra_vfs/tests/vfs.rs b/crates/ra_vfs/tests/vfs.rs index f56fc4603..357e1c775 100644 --- a/crates/ra_vfs/tests/vfs.rs +++ b/crates/ra_vfs/tests/vfs.rs | |||
@@ -1,24 +1,47 @@ | |||
1 | use std::{ | 1 | use std::{collections::HashSet, fs, time::Duration}; |
2 | fs, | ||
3 | collections::HashSet, | ||
4 | }; | ||
5 | 2 | ||
3 | // use flexi_logger::Logger; | ||
4 | use crossbeam_channel::RecvTimeoutError; | ||
5 | use ra_vfs::{Vfs, VfsChange}; | ||
6 | use tempfile::tempdir; | 6 | use tempfile::tempdir; |
7 | 7 | ||
8 | use ra_vfs::{Vfs, VfsChange}; | 8 | fn process_tasks(vfs: &mut Vfs, num_tasks: u32) { |
9 | for _ in 0..num_tasks { | ||
10 | let task = vfs | ||
11 | .task_receiver() | ||
12 | .recv_timeout(Duration::from_secs(3)) | ||
13 | .unwrap(); | ||
14 | log::debug!("{:?}", task); | ||
15 | vfs.handle_task(task); | ||
16 | } | ||
17 | } | ||
18 | |||
19 | macro_rules! assert_match { | ||
20 | ($x:expr, $pat:pat) => { | ||
21 | assert_match!($x, $pat, ()) | ||
22 | }; | ||
23 | ($x:expr, $pat:pat, $assert:expr) => { | ||
24 | match $x { | ||
25 | $pat => $assert, | ||
26 | x => assert!(false, "Expected {}, got {:?}", stringify!($pat), x), | ||
27 | }; | ||
28 | }; | ||
29 | } | ||
9 | 30 | ||
10 | #[test] | 31 | #[test] |
11 | fn test_vfs_works() -> std::io::Result<()> { | 32 | fn test_vfs_works() -> std::io::Result<()> { |
33 | // Logger::with_str("vfs=debug,ra_vfs=debug").start().unwrap(); | ||
34 | |||
12 | let files = [ | 35 | let files = [ |
13 | ("a/foo.rs", "hello"), | 36 | ("a/foo.rs", "hello"), |
14 | ("a/bar.rs", "world"), | 37 | ("a/bar.rs", "world"), |
15 | ("a/b/baz.rs", "nested hello"), | 38 | ("a/b/baz.rs", "nested hello"), |
16 | ]; | 39 | ]; |
17 | 40 | ||
18 | let dir = tempdir()?; | 41 | let dir = tempdir().unwrap(); |
19 | for (path, text) in files.iter() { | 42 | for (path, text) in files.iter() { |
20 | let file_path = dir.path().join(path); | 43 | let file_path = dir.path().join(path); |
21 | fs::create_dir_all(file_path.parent().unwrap())?; | 44 | fs::create_dir_all(file_path.parent().unwrap()).unwrap(); |
22 | fs::write(file_path, text)? | 45 | fs::write(file_path, text)? |
23 | } | 46 | } |
24 | 47 | ||
@@ -26,10 +49,7 @@ fn test_vfs_works() -> std::io::Result<()> { | |||
26 | let b_root = dir.path().join("a/b"); | 49 | let b_root = dir.path().join("a/b"); |
27 | 50 | ||
28 | let (mut vfs, _) = Vfs::new(vec![a_root, b_root]); | 51 | let (mut vfs, _) = Vfs::new(vec![a_root, b_root]); |
29 | for _ in 0..2 { | 52 | process_tasks(&mut vfs, 2); |
30 | let task = vfs.task_receiver().recv().unwrap(); | ||
31 | vfs.handle_task(task); | ||
32 | } | ||
33 | { | 53 | { |
34 | let files = vfs | 54 | let files = vfs |
35 | .commit_changes() | 55 | .commit_changes() |
@@ -58,43 +78,101 @@ fn test_vfs_works() -> std::io::Result<()> { | |||
58 | assert_eq!(files, expected_files); | 78 | assert_eq!(files, expected_files); |
59 | } | 79 | } |
60 | 80 | ||
61 | vfs.add_file_overlay(&dir.path().join("a/b/baz.rs"), "quux".to_string()); | 81 | fs::write(&dir.path().join("a/b/baz.rs"), "quux").unwrap(); |
62 | let change = vfs.commit_changes().pop().unwrap(); | 82 | process_tasks(&mut vfs, 1); |
63 | match change { | 83 | assert_match!( |
64 | VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "quux"), | 84 | vfs.commit_changes().as_slice(), |
65 | _ => panic!("unexpected change"), | 85 | [VfsChange::ChangeFile { text, .. }], |
66 | } | 86 | assert_eq!(text.as_str(), "quux") |
87 | ); | ||
67 | 88 | ||
68 | vfs.change_file_overlay(&dir.path().join("a/b/baz.rs"), "m".to_string()); | 89 | vfs.add_file_overlay(&dir.path().join("a/b/baz.rs"), "m".to_string()); |
69 | let change = vfs.commit_changes().pop().unwrap(); | 90 | assert_match!( |
70 | match change { | 91 | vfs.commit_changes().as_slice(), |
71 | VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "m"), | 92 | [VfsChange::ChangeFile { text, .. }], |
72 | _ => panic!("unexpected change"), | 93 | assert_eq!(text.as_str(), "m") |
73 | } | 94 | ); |
74 | 95 | ||
96 | // changing file on disk while overlayed doesn't generate a VfsChange | ||
97 | fs::write(&dir.path().join("a/b/baz.rs"), "corge").unwrap(); | ||
98 | process_tasks(&mut vfs, 1); | ||
99 | assert_match!(vfs.commit_changes().as_slice(), []); | ||
100 | |||
101 | // removing overlay restores data on disk | ||
75 | vfs.remove_file_overlay(&dir.path().join("a/b/baz.rs")); | 102 | vfs.remove_file_overlay(&dir.path().join("a/b/baz.rs")); |
76 | let change = vfs.commit_changes().pop().unwrap(); | 103 | assert_match!( |
77 | match change { | 104 | vfs.commit_changes().as_slice(), |
78 | VfsChange::ChangeFile { text, .. } => assert_eq!(&*text, "nested hello"), | 105 | [VfsChange::ChangeFile { text, .. }], |
79 | _ => panic!("unexpected change"), | 106 | assert_eq!(text.as_str(), "corge") |
80 | } | 107 | ); |
81 | 108 | ||
82 | vfs.add_file_overlay(&dir.path().join("a/b/spam.rs"), "spam".to_string()); | 109 | vfs.add_file_overlay(&dir.path().join("a/b/spam.rs"), "spam".to_string()); |
83 | let change = vfs.commit_changes().pop().unwrap(); | 110 | assert_match!( |
84 | match change { | 111 | vfs.commit_changes().as_slice(), |
85 | VfsChange::AddFile { text, path, .. } => { | 112 | [VfsChange::AddFile { text, path, .. }], |
86 | assert_eq!(&*text, "spam"); | 113 | { |
114 | assert_eq!(text.as_str(), "spam"); | ||
87 | assert_eq!(path, "spam.rs"); | 115 | assert_eq!(path, "spam.rs"); |
88 | } | 116 | } |
89 | _ => panic!("unexpected change"), | 117 | ); |
90 | } | ||
91 | 118 | ||
92 | vfs.remove_file_overlay(&dir.path().join("a/b/spam.rs")); | 119 | vfs.remove_file_overlay(&dir.path().join("a/b/spam.rs")); |
93 | let change = vfs.commit_changes().pop().unwrap(); | 120 | assert_match!( |
94 | match change { | 121 | vfs.commit_changes().as_slice(), |
95 | VfsChange::RemoveFile { .. } => (), | 122 | [VfsChange::RemoveFile { path, .. }], |
96 | _ => panic!("unexpected change"), | 123 | assert_eq!(path, "spam.rs") |
97 | } | 124 | ); |
125 | |||
126 | fs::create_dir_all(dir.path().join("a/sub1/sub2")).unwrap(); | ||
127 | fs::write(dir.path().join("a/sub1/sub2/new.rs"), "new hello").unwrap(); | ||
128 | process_tasks(&mut vfs, 1); | ||
129 | assert_match!( | ||
130 | vfs.commit_changes().as_slice(), | ||
131 | [VfsChange::AddFile { text, path, .. }], | ||
132 | { | ||
133 | assert_eq!(text.as_str(), "new hello"); | ||
134 | assert_eq!(path, "sub1/sub2/new.rs"); | ||
135 | } | ||
136 | ); | ||
137 | |||
138 | fs::rename( | ||
139 | &dir.path().join("a/sub1/sub2/new.rs"), | ||
140 | &dir.path().join("a/sub1/sub2/new1.rs"), | ||
141 | ) | ||
142 | .unwrap(); | ||
143 | process_tasks(&mut vfs, 2); | ||
144 | assert_match!( | ||
145 | vfs.commit_changes().as_slice(), | ||
146 | [VfsChange::RemoveFile { | ||
147 | path: removed_path, .. | ||
148 | }, VfsChange::AddFile { | ||
149 | text, | ||
150 | path: added_path, | ||
151 | .. | ||
152 | }], | ||
153 | { | ||
154 | assert_eq!(removed_path, "sub1/sub2/new.rs"); | ||
155 | assert_eq!(added_path, "sub1/sub2/new1.rs"); | ||
156 | assert_eq!(text.as_str(), "new hello"); | ||
157 | } | ||
158 | ); | ||
159 | |||
160 | fs::remove_file(&dir.path().join("a/sub1/sub2/new1.rs")).unwrap(); | ||
161 | process_tasks(&mut vfs, 1); | ||
162 | assert_match!( | ||
163 | vfs.commit_changes().as_slice(), | ||
164 | [VfsChange::RemoveFile { path, .. }], | ||
165 | assert_eq!(path, "sub1/sub2/new1.rs") | ||
166 | ); | ||
167 | |||
168 | // should be ignored | ||
169 | fs::create_dir_all(dir.path().join("a/target")).unwrap(); | ||
170 | fs::write(&dir.path().join("a/target/new.rs"), "ignore me").unwrap(); | ||
171 | |||
172 | assert_match!( | ||
173 | vfs.task_receiver().recv_timeout(Duration::from_millis(300)), // slightly more than watcher debounce delay | ||
174 | Err(RecvTimeoutError::Timeout) | ||
175 | ); | ||
98 | 176 | ||
99 | vfs.shutdown().unwrap(); | 177 | vfs.shutdown().unwrap(); |
100 | Ok(()) | 178 | Ok(()) |