diff options
author | Zac Pullar-Strecker <[email protected]> | 2020-07-31 03:12:44 +0100 |
---|---|---|
committer | Zac Pullar-Strecker <[email protected]> | 2020-07-31 03:12:44 +0100 |
commit | f05d7b41a719d848844b054a16477b29d0f063c6 (patch) | |
tree | 0a8a0946e8aef2ce64d4c13d0035ba41cce2daf3 /crates/ra_ide_db | |
parent | 73ff610e41959e3e7c78a2b4b25b086883132956 (diff) | |
parent | 6b7cb8b5ab539fc4333ce34bc29bf77c976f232a (diff) |
Merge remote-tracking branch 'upstream/master' into 503-hover-doc-links
Hasn't fixed tests yet.
Diffstat (limited to 'crates/ra_ide_db')
-rw-r--r-- | crates/ra_ide_db/Cargo.toml | 4 | ||||
-rw-r--r-- | crates/ra_ide_db/src/change.rs | 87 | ||||
-rw-r--r-- | crates/ra_ide_db/src/defs.rs | 91 | ||||
-rw-r--r-- | crates/ra_ide_db/src/imports_locator.rs | 88 | ||||
-rw-r--r-- | crates/ra_ide_db/src/lib.rs | 43 | ||||
-rw-r--r-- | crates/ra_ide_db/src/line_index.rs | 8 | ||||
-rw-r--r-- | crates/ra_ide_db/src/search.rs | 18 | ||||
-rw-r--r-- | crates/ra_ide_db/src/source_change.rs | 2 | ||||
-rw-r--r-- | crates/ra_ide_db/src/symbol_index.rs | 24 |
9 files changed, 209 insertions(+), 156 deletions(-)
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml index b14206c9b..f345f1de8 100644 --- a/crates/ra_ide_db/Cargo.toml +++ b/crates/ra_ide_db/Cargo.toml | |||
@@ -3,6 +3,7 @@ edition = "2018" | |||
3 | name = "ra_ide_db" | 3 | name = "ra_ide_db" |
4 | version = "0.1.0" | 4 | version = "0.1.0" |
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | license = "MIT OR Apache-2.0" | ||
6 | 7 | ||
7 | [lib] | 8 | [lib] |
8 | doctest = false | 9 | doctest = false |
@@ -15,10 +16,11 @@ log = "0.4.8" | |||
15 | rayon = "1.3.0" | 16 | rayon = "1.3.0" |
16 | fst = { version = "0.4", default-features = false } | 17 | fst = { version = "0.4", default-features = false } |
17 | rustc-hash = "1.1.0" | 18 | rustc-hash = "1.1.0" |
18 | superslice = "1.0.0" | ||
19 | once_cell = "1.3.1" | 19 | once_cell = "1.3.1" |
20 | either = "1.5.3" | 20 | either = "1.5.3" |
21 | 21 | ||
22 | stdx = { path = "../stdx" } | ||
23 | |||
22 | ra_syntax = { path = "../ra_syntax" } | 24 | ra_syntax = { path = "../ra_syntax" } |
23 | ra_text_edit = { path = "../ra_text_edit" } | 25 | ra_text_edit = { path = "../ra_text_edit" } |
24 | ra_db = { path = "../ra_db" } | 26 | ra_db = { path = "../ra_db" } |
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs index b507000f2..32d9a8d1f 100644 --- a/crates/ra_ide_db/src/change.rs +++ b/crates/ra_ide_db/src/change.rs | |||
@@ -5,8 +5,7 @@ use std::{fmt, sync::Arc, time}; | |||
5 | 5 | ||
6 | use ra_db::{ | 6 | use ra_db::{ |
7 | salsa::{Database, Durability, SweepStrategy}, | 7 | salsa::{Database, Durability, SweepStrategy}, |
8 | CrateGraph, FileId, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot, | 8 | CrateGraph, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, |
9 | SourceRootId, | ||
10 | }; | 9 | }; |
11 | use ra_prof::{memory_usage, profile, Bytes}; | 10 | use ra_prof::{memory_usage, profile, Bytes}; |
12 | use rustc_hash::FxHashSet; | 11 | use rustc_hash::FxHashSet; |
@@ -57,14 +56,14 @@ impl AnalysisChange { | |||
57 | #[derive(Debug)] | 56 | #[derive(Debug)] |
58 | struct AddFile { | 57 | struct AddFile { |
59 | file_id: FileId, | 58 | file_id: FileId, |
60 | path: RelativePathBuf, | 59 | path: String, |
61 | text: Arc<String>, | 60 | text: Arc<String>, |
62 | } | 61 | } |
63 | 62 | ||
64 | #[derive(Debug)] | 63 | #[derive(Debug)] |
65 | struct RemoveFile { | 64 | struct RemoveFile { |
66 | file_id: FileId, | 65 | file_id: FileId, |
67 | path: RelativePathBuf, | 66 | path: String, |
68 | } | 67 | } |
69 | 68 | ||
70 | #[derive(Default)] | 69 | #[derive(Default)] |
@@ -147,37 +146,46 @@ impl RootDatabase { | |||
147 | 146 | ||
148 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | 147 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); |
149 | 148 | ||
150 | self.query(ra_db::ParseQuery).sweep(sweep); | 149 | ra_db::ParseQuery.in_db(self).sweep(sweep); |
151 | self.query(hir::db::ParseMacroQuery).sweep(sweep); | 150 | hir::db::ParseMacroQuery.in_db(self).sweep(sweep); |
152 | 151 | ||
153 | // Macros do take significant space, but less then the syntax trees | 152 | // Macros do take significant space, but less then the syntax trees |
154 | // self.query(hir::db::MacroDefQuery).sweep(sweep); | 153 | // self.query(hir::db::MacroDefQuery).sweep(sweep); |
155 | // self.query(hir::db::MacroArgQuery).sweep(sweep); | 154 | // self.query(hir::db::MacroArgTextQuery).sweep(sweep); |
156 | // self.query(hir::db::MacroExpandQuery).sweep(sweep); | 155 | // self.query(hir::db::MacroExpandQuery).sweep(sweep); |
157 | 156 | ||
158 | self.query(hir::db::AstIdMapQuery).sweep(sweep); | 157 | hir::db::AstIdMapQuery.in_db(self).sweep(sweep); |
159 | 158 | ||
160 | self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep); | 159 | hir::db::BodyWithSourceMapQuery.in_db(self).sweep(sweep); |
161 | 160 | ||
162 | self.query(hir::db::ExprScopesQuery).sweep(sweep); | 161 | hir::db::ExprScopesQuery.in_db(self).sweep(sweep); |
163 | self.query(hir::db::InferQueryQuery).sweep(sweep); | 162 | hir::db::InferQueryQuery.in_db(self).sweep(sweep); |
164 | self.query(hir::db::BodyQuery).sweep(sweep); | 163 | hir::db::BodyQuery.in_db(self).sweep(sweep); |
165 | } | 164 | } |
166 | 165 | ||
166 | // Feature: Memory Usage | ||
167 | // | ||
168 | // Clears rust-analyzer's internal database and prints memory usage statistics. | ||
169 | // | ||
170 | // |=== | ||
171 | // | Editor | Action Name | ||
172 | // | ||
173 | // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)** | ||
174 | // |=== | ||
167 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { | 175 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { |
168 | let mut acc: Vec<(String, Bytes)> = vec![]; | 176 | let mut acc: Vec<(String, Bytes)> = vec![]; |
169 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | 177 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); |
170 | macro_rules! sweep_each_query { | 178 | macro_rules! sweep_each_query { |
171 | ($($q:path)*) => {$( | 179 | ($($q:path)*) => {$( |
172 | let before = memory_usage().allocated; | 180 | let before = memory_usage().allocated; |
173 | self.query($q).sweep(sweep); | 181 | $q.in_db(self).sweep(sweep); |
174 | let after = memory_usage().allocated; | 182 | let after = memory_usage().allocated; |
175 | let q: $q = Default::default(); | 183 | let q: $q = Default::default(); |
176 | let name = format!("{:?}", q); | 184 | let name = format!("{:?}", q); |
177 | acc.push((name, before - after)); | 185 | acc.push((name, before - after)); |
178 | 186 | ||
179 | let before = memory_usage().allocated; | 187 | let before = memory_usage().allocated; |
180 | self.query($q).sweep(sweep.discard_everything()); | 188 | $q.in_db(self).sweep(sweep.discard_everything()); |
181 | let after = memory_usage().allocated; | 189 | let after = memory_usage().allocated; |
182 | let q: $q = Default::default(); | 190 | let q: $q = Default::default(); |
183 | let name = format!("{:?} (deps)", q); | 191 | let name = format!("{:?} (deps)", q); |
@@ -191,12 +199,10 @@ impl RootDatabase { | |||
191 | 199 | ||
192 | // AstDatabase | 200 | // AstDatabase |
193 | hir::db::AstIdMapQuery | 201 | hir::db::AstIdMapQuery |
194 | hir::db::InternMacroQuery | 202 | hir::db::MacroArgTextQuery |
195 | hir::db::MacroArgQuery | ||
196 | hir::db::MacroDefQuery | 203 | hir::db::MacroDefQuery |
197 | hir::db::ParseMacroQuery | 204 | hir::db::ParseMacroQuery |
198 | hir::db::MacroExpandQuery | 205 | hir::db::MacroExpandQuery |
199 | hir::db::InternEagerExpansionQuery | ||
200 | 206 | ||
201 | // DefDatabase | 207 | // DefDatabase |
202 | hir::db::ItemTreeQuery | 208 | hir::db::ItemTreeQuery |
@@ -221,17 +227,6 @@ impl RootDatabase { | |||
221 | hir::db::DocumentationQuery | 227 | hir::db::DocumentationQuery |
222 | hir::db::ImportMapQuery | 228 | hir::db::ImportMapQuery |
223 | 229 | ||
224 | // InternDatabase | ||
225 | hir::db::InternFunctionQuery | ||
226 | hir::db::InternStructQuery | ||
227 | hir::db::InternUnionQuery | ||
228 | hir::db::InternEnumQuery | ||
229 | hir::db::InternConstQuery | ||
230 | hir::db::InternStaticQuery | ||
231 | hir::db::InternTraitQuery | ||
232 | hir::db::InternTypeAliasQuery | ||
233 | hir::db::InternImplQuery | ||
234 | |||
235 | // HirDatabase | 230 | // HirDatabase |
236 | hir::db::InferQueryQuery | 231 | hir::db::InferQueryQuery |
237 | hir::db::TyQuery | 232 | hir::db::TyQuery |
@@ -243,12 +238,9 @@ impl RootDatabase { | |||
243 | hir::db::GenericPredicatesForParamQuery | 238 | hir::db::GenericPredicatesForParamQuery |
244 | hir::db::GenericPredicatesQuery | 239 | hir::db::GenericPredicatesQuery |
245 | hir::db::GenericDefaultsQuery | 240 | hir::db::GenericDefaultsQuery |
246 | hir::db::ImplsInCrateQuery | 241 | hir::db::InherentImplsInCrateQuery |
247 | hir::db::ImplsFromDepsQuery | 242 | hir::db::TraitImplsInCrateQuery |
248 | hir::db::InternTypeCtorQuery | 243 | hir::db::TraitImplsInDepsQuery |
249 | hir::db::InternTypeParamIdQuery | ||
250 | hir::db::InternChalkImplQuery | ||
251 | hir::db::InternAssocTyValueQuery | ||
252 | hir::db::AssociatedTyDataQuery | 244 | hir::db::AssociatedTyDataQuery |
253 | hir::db::TraitDatumQuery | 245 | hir::db::TraitDatumQuery |
254 | hir::db::StructDatumQuery | 246 | hir::db::StructDatumQuery |
@@ -263,6 +255,33 @@ impl RootDatabase { | |||
263 | // LineIndexDatabase | 255 | // LineIndexDatabase |
264 | crate::LineIndexQuery | 256 | crate::LineIndexQuery |
265 | ]; | 257 | ]; |
258 | |||
259 | // To collect interned data, we need to bump the revision counter by performing a synthetic | ||
260 | // write. | ||
261 | // We do this after collecting the non-interned queries to correctly attribute memory used | ||
262 | // by interned data. | ||
263 | self.salsa_runtime_mut().synthetic_write(Durability::HIGH); | ||
264 | |||
265 | sweep_each_query![ | ||
266 | // AstDatabase | ||
267 | hir::db::InternMacroQuery | ||
268 | hir::db::InternEagerExpansionQuery | ||
269 | |||
270 | // InternDatabase | ||
271 | hir::db::InternFunctionQuery | ||
272 | hir::db::InternStructQuery | ||
273 | hir::db::InternUnionQuery | ||
274 | hir::db::InternEnumQuery | ||
275 | hir::db::InternConstQuery | ||
276 | hir::db::InternStaticQuery | ||
277 | hir::db::InternTraitQuery | ||
278 | hir::db::InternTypeAliasQuery | ||
279 | hir::db::InternImplQuery | ||
280 | |||
281 | // HirDatabase | ||
282 | hir::db::InternTypeParamIdQuery | ||
283 | ]; | ||
284 | |||
266 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); | 285 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); |
267 | acc | 286 | acc |
268 | } | 287 | } |
diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs index bc6e89cbc..80c99935d 100644 --- a/crates/ra_ide_db/src/defs.rs +++ b/crates/ra_ide_db/src/defs.rs | |||
@@ -136,7 +136,7 @@ pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option | |||
136 | 136 | ||
137 | match_ast! { | 137 | match_ast! { |
138 | match parent { | 138 | match parent { |
139 | ast::Alias(it) => { | 139 | ast::Rename(it) => { |
140 | let use_tree = it.syntax().parent().and_then(ast::UseTree::cast)?; | 140 | let use_tree = it.syntax().parent().and_then(ast::UseTree::cast)?; |
141 | let path = use_tree.path()?; | 141 | let path = use_tree.path()?; |
142 | let path_segment = path.segment()?; | 142 | let path_segment = path.segment()?; |
@@ -159,7 +159,7 @@ pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option | |||
159 | 159 | ||
160 | Some(NameClass::Definition(Definition::Local(local))) | 160 | Some(NameClass::Definition(Definition::Local(local))) |
161 | }, | 161 | }, |
162 | ast::RecordFieldDef(it) => { | 162 | ast::RecordField(it) => { |
163 | let field: hir::Field = sema.to_def(&it)?; | 163 | let field: hir::Field = sema.to_def(&it)?; |
164 | Some(NameClass::Definition(Definition::Field(field))) | 164 | Some(NameClass::Definition(Definition::Field(field))) |
165 | }, | 165 | }, |
@@ -167,39 +167,39 @@ pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option | |||
167 | let def = sema.to_def(&it)?; | 167 | let def = sema.to_def(&it)?; |
168 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 168 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
169 | }, | 169 | }, |
170 | ast::StructDef(it) => { | 170 | ast::Struct(it) => { |
171 | let def: hir::Struct = sema.to_def(&it)?; | 171 | let def: hir::Struct = sema.to_def(&it)?; |
172 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 172 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
173 | }, | 173 | }, |
174 | ast::UnionDef(it) => { | 174 | ast::Union(it) => { |
175 | let def: hir::Union = sema.to_def(&it)?; | 175 | let def: hir::Union = sema.to_def(&it)?; |
176 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 176 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
177 | }, | 177 | }, |
178 | ast::EnumDef(it) => { | 178 | ast::Enum(it) => { |
179 | let def: hir::Enum = sema.to_def(&it)?; | 179 | let def: hir::Enum = sema.to_def(&it)?; |
180 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 180 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
181 | }, | 181 | }, |
182 | ast::TraitDef(it) => { | 182 | ast::Trait(it) => { |
183 | let def: hir::Trait = sema.to_def(&it)?; | 183 | let def: hir::Trait = sema.to_def(&it)?; |
184 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 184 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
185 | }, | 185 | }, |
186 | ast::StaticDef(it) => { | 186 | ast::Static(it) => { |
187 | let def: hir::Static = sema.to_def(&it)?; | 187 | let def: hir::Static = sema.to_def(&it)?; |
188 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 188 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
189 | }, | 189 | }, |
190 | ast::EnumVariant(it) => { | 190 | ast::Variant(it) => { |
191 | let def: hir::EnumVariant = sema.to_def(&it)?; | 191 | let def: hir::EnumVariant = sema.to_def(&it)?; |
192 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 192 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
193 | }, | 193 | }, |
194 | ast::FnDef(it) => { | 194 | ast::Fn(it) => { |
195 | let def: hir::Function = sema.to_def(&it)?; | 195 | let def: hir::Function = sema.to_def(&it)?; |
196 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 196 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
197 | }, | 197 | }, |
198 | ast::ConstDef(it) => { | 198 | ast::Const(it) => { |
199 | let def: hir::Const = sema.to_def(&it)?; | 199 | let def: hir::Const = sema.to_def(&it)?; |
200 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 200 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
201 | }, | 201 | }, |
202 | ast::TypeAliasDef(it) => { | 202 | ast::TypeAlias(it) => { |
203 | let def: hir::TypeAlias = sema.to_def(&it)?; | 203 | let def: hir::TypeAlias = sema.to_def(&it)?; |
204 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) | 204 | Some(NameClass::Definition(Definition::ModuleDef(def.into()))) |
205 | }, | 205 | }, |
@@ -253,7 +253,7 @@ pub fn classify_name_ref( | |||
253 | } | 253 | } |
254 | } | 254 | } |
255 | 255 | ||
256 | if let Some(record_field) = ast::RecordField::for_field_name(name_ref) { | 256 | if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) { |
257 | if let Some((field, local)) = sema.resolve_record_field(&record_field) { | 257 | if let Some((field, local)) = sema.resolve_record_field(&record_field) { |
258 | let field = Definition::Field(field); | 258 | let field = Definition::Field(field); |
259 | let res = match local { | 259 | let res = match local { |
@@ -271,28 +271,61 @@ pub fn classify_name_ref( | |||
271 | } | 271 | } |
272 | } | 272 | } |
273 | 273 | ||
274 | if ast::AssocTypeArg::cast(parent.clone()).is_some() { | ||
275 | // `Trait<Assoc = Ty>` | ||
276 | // ^^^^^ | ||
277 | let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; | ||
278 | let resolved = sema.resolve_path(&path)?; | ||
279 | if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { | ||
280 | if let Some(ty) = tr | ||
281 | .items(sema.db) | ||
282 | .iter() | ||
283 | .filter_map(|assoc| match assoc { | ||
284 | hir::AssocItem::TypeAlias(it) => Some(*it), | ||
285 | _ => None, | ||
286 | }) | ||
287 | .find(|alias| alias.name(sema.db).to_string() == **name_ref.text()) | ||
288 | { | ||
289 | return Some(NameRefClass::Definition(Definition::ModuleDef( | ||
290 | ModuleDef::TypeAlias(ty), | ||
291 | ))); | ||
292 | } | ||
293 | } | ||
294 | } | ||
295 | |||
274 | if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { | 296 | if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { |
275 | if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { | 297 | if let Some(path) = macro_call.path() { |
276 | return Some(NameRefClass::Definition(Definition::Macro(macro_def))); | 298 | if path.qualifier().is_none() { |
299 | // Only use this to resolve single-segment macro calls like `foo!()`. Multi-segment | ||
300 | // paths are handled below (allowing `log<|>::info!` to resolve to the log crate). | ||
301 | if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { | ||
302 | return Some(NameRefClass::Definition(Definition::Macro(macro_def))); | ||
303 | } | ||
304 | } | ||
277 | } | 305 | } |
278 | } | 306 | } |
279 | 307 | ||
280 | let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; | 308 | let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; |
281 | let resolved = sema.resolve_path(&path)?; | 309 | let resolved = sema.resolve_path(&path)?; |
282 | let res = match resolved { | 310 | Some(NameRefClass::Definition(resolved.into())) |
283 | PathResolution::Def(def) => Definition::ModuleDef(def), | 311 | } |
284 | PathResolution::AssocItem(item) => { | 312 | |
285 | let def = match item { | 313 | impl From<PathResolution> for Definition { |
286 | hir::AssocItem::Function(it) => it.into(), | 314 | fn from(path_resolution: PathResolution) -> Self { |
287 | hir::AssocItem::Const(it) => it.into(), | 315 | match path_resolution { |
288 | hir::AssocItem::TypeAlias(it) => it.into(), | 316 | PathResolution::Def(def) => Definition::ModuleDef(def), |
289 | }; | 317 | PathResolution::AssocItem(item) => { |
290 | Definition::ModuleDef(def) | 318 | let def = match item { |
319 | hir::AssocItem::Function(it) => it.into(), | ||
320 | hir::AssocItem::Const(it) => it.into(), | ||
321 | hir::AssocItem::TypeAlias(it) => it.into(), | ||
322 | }; | ||
323 | Definition::ModuleDef(def) | ||
324 | } | ||
325 | PathResolution::Local(local) => Definition::Local(local), | ||
326 | PathResolution::TypeParam(par) => Definition::TypeParam(par), | ||
327 | PathResolution::Macro(def) => Definition::Macro(def), | ||
328 | PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), | ||
291 | } | 329 | } |
292 | PathResolution::Local(local) => Definition::Local(local), | 330 | } |
293 | PathResolution::TypeParam(par) => Definition::TypeParam(par), | ||
294 | PathResolution::Macro(def) => Definition::Macro(def), | ||
295 | PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), | ||
296 | }; | ||
297 | Some(NameRefClass::Definition(res)) | ||
298 | } | 331 | } |
diff --git a/crates/ra_ide_db/src/imports_locator.rs b/crates/ra_ide_db/src/imports_locator.rs index fff112e66..1fba71ff8 100644 --- a/crates/ra_ide_db/src/imports_locator.rs +++ b/crates/ra_ide_db/src/imports_locator.rs | |||
@@ -13,57 +13,53 @@ use crate::{ | |||
13 | use either::Either; | 13 | use either::Either; |
14 | use rustc_hash::FxHashSet; | 14 | use rustc_hash::FxHashSet; |
15 | 15 | ||
16 | pub struct ImportsLocator<'a> { | 16 | pub fn find_imports<'a>( |
17 | sema: Semantics<'a, RootDatabase>, | 17 | sema: &Semantics<'a, RootDatabase>, |
18 | krate: Crate, | 18 | krate: Crate, |
19 | } | 19 | name_to_import: &str, |
20 | 20 | ) -> Vec<Either<ModuleDef, MacroDef>> { | |
21 | impl<'a> ImportsLocator<'a> { | 21 | let _p = profile("search_for_imports"); |
22 | pub fn new(db: &'a RootDatabase, krate: Crate) -> Self { | 22 | let db = sema.db; |
23 | Self { sema: Semantics::new(db), krate } | ||
24 | } | ||
25 | 23 | ||
26 | pub fn find_imports(&mut self, name_to_import: &str) -> Vec<Either<ModuleDef, MacroDef>> { | 24 | // Query dependencies first. |
27 | let _p = profile("search_for_imports"); | 25 | let mut candidates: FxHashSet<_> = |
28 | let db = self.sema.db; | 26 | krate.query_external_importables(db, name_to_import).collect(); |
29 | 27 | ||
30 | // Query dependencies first. | 28 | // Query the local crate using the symbol index. |
31 | let mut candidates: FxHashSet<_> = | 29 | let local_results = { |
32 | self.krate.query_external_importables(db, name_to_import).collect(); | 30 | let mut query = Query::new(name_to_import.to_string()); |
31 | query.exact(); | ||
32 | query.limit(40); | ||
33 | symbol_index::crate_symbols(db, krate.into(), query) | ||
34 | }; | ||
33 | 35 | ||
34 | // Query the local crate using the symbol index. | 36 | candidates.extend( |
35 | let local_results = { | 37 | local_results |
36 | let mut query = Query::new(name_to_import.to_string()); | 38 | .into_iter() |
37 | query.exact(); | 39 | .filter_map(|import_candidate| get_name_definition(sema, &import_candidate)) |
38 | query.limit(40); | 40 | .filter_map(|name_definition_to_import| match name_definition_to_import { |
39 | symbol_index::crate_symbols(db, self.krate.into(), query) | 41 | Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), |
40 | }; | 42 | Definition::Macro(macro_def) => Some(Either::Right(macro_def)), |
43 | _ => None, | ||
44 | }), | ||
45 | ); | ||
41 | 46 | ||
42 | candidates.extend( | 47 | candidates.into_iter().collect() |
43 | local_results | 48 | } |
44 | .into_iter() | ||
45 | .filter_map(|import_candidate| self.get_name_definition(&import_candidate)) | ||
46 | .filter_map(|name_definition_to_import| match name_definition_to_import { | ||
47 | Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), | ||
48 | Definition::Macro(macro_def) => Some(Either::Right(macro_def)), | ||
49 | _ => None, | ||
50 | }), | ||
51 | ); | ||
52 | |||
53 | candidates.into_iter().collect() | ||
54 | } | ||
55 | 49 | ||
56 | fn get_name_definition(&mut self, import_candidate: &FileSymbol) -> Option<Definition> { | 50 | fn get_name_definition<'a>( |
57 | let _p = profile("get_name_definition"); | 51 | sema: &Semantics<'a, RootDatabase>, |
58 | let file_id = import_candidate.file_id; | 52 | import_candidate: &FileSymbol, |
53 | ) -> Option<Definition> { | ||
54 | let _p = profile("get_name_definition"); | ||
55 | let file_id = import_candidate.file_id; | ||
59 | 56 | ||
60 | let candidate_node = import_candidate.ptr.to_node(self.sema.parse(file_id).syntax()); | 57 | let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax()); |
61 | let candidate_name_node = if candidate_node.kind() != NAME { | 58 | let candidate_name_node = if candidate_node.kind() != NAME { |
62 | candidate_node.children().find(|it| it.kind() == NAME)? | 59 | candidate_node.children().find(|it| it.kind() == NAME)? |
63 | } else { | 60 | } else { |
64 | candidate_node | 61 | candidate_node |
65 | }; | 62 | }; |
66 | let name = ast::Name::cast(candidate_name_node)?; | 63 | let name = ast::Name::cast(candidate_name_node)?; |
67 | classify_name(&self.sema, &name)?.into_definition() | 64 | classify_name(sema, &name)?.into_definition() |
68 | } | ||
69 | } | 65 | } |
diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs index a808de4f1..6900cac73 100644 --- a/crates/ra_ide_db/src/lib.rs +++ b/crates/ra_ide_db/src/lib.rs | |||
@@ -11,11 +11,11 @@ pub mod imports_locator; | |||
11 | pub mod source_change; | 11 | pub mod source_change; |
12 | mod wasm_shims; | 12 | mod wasm_shims; |
13 | 13 | ||
14 | use std::sync::Arc; | 14 | use std::{fmt, sync::Arc}; |
15 | 15 | ||
16 | use hir::db::{AstDatabase, DefDatabase}; | 16 | use hir::db::{AstDatabase, DefDatabase, HirDatabase}; |
17 | use ra_db::{ | 17 | use ra_db::{ |
18 | salsa::{self, Database, Durability}, | 18 | salsa::{self, Durability}, |
19 | Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, | 19 | Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, |
20 | Upcast, | 20 | Upcast, |
21 | }; | 21 | }; |
@@ -33,13 +33,18 @@ use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; | |||
33 | hir::db::DefDatabaseStorage, | 33 | hir::db::DefDatabaseStorage, |
34 | hir::db::HirDatabaseStorage | 34 | hir::db::HirDatabaseStorage |
35 | )] | 35 | )] |
36 | #[derive(Debug)] | ||
37 | pub struct RootDatabase { | 36 | pub struct RootDatabase { |
38 | runtime: salsa::Runtime<RootDatabase>, | 37 | storage: salsa::Storage<RootDatabase>, |
39 | pub last_gc: crate::wasm_shims::Instant, | 38 | pub last_gc: crate::wasm_shims::Instant, |
40 | pub last_gc_check: crate::wasm_shims::Instant, | 39 | pub last_gc_check: crate::wasm_shims::Instant, |
41 | } | 40 | } |
42 | 41 | ||
42 | impl fmt::Debug for RootDatabase { | ||
43 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
44 | f.debug_struct("RootDatabase").finish() | ||
45 | } | ||
46 | } | ||
47 | |||
43 | impl Upcast<dyn AstDatabase> for RootDatabase { | 48 | impl Upcast<dyn AstDatabase> for RootDatabase { |
44 | fn upcast(&self) -> &(dyn AstDatabase + 'static) { | 49 | fn upcast(&self) -> &(dyn AstDatabase + 'static) { |
45 | &*self | 50 | &*self |
@@ -52,6 +57,12 @@ impl Upcast<dyn DefDatabase> for RootDatabase { | |||
52 | } | 57 | } |
53 | } | 58 | } |
54 | 59 | ||
60 | impl Upcast<dyn HirDatabase> for RootDatabase { | ||
61 | fn upcast(&self) -> &(dyn HirDatabase + 'static) { | ||
62 | &*self | ||
63 | } | ||
64 | } | ||
65 | |||
55 | impl FileLoader for RootDatabase { | 66 | impl FileLoader for RootDatabase { |
56 | fn file_text(&self, file_id: FileId) -> Arc<String> { | 67 | fn file_text(&self, file_id: FileId) -> Arc<String> { |
57 | FileLoaderDelegate(self).file_text(file_id) | 68 | FileLoaderDelegate(self).file_text(file_id) |
@@ -65,17 +76,11 @@ impl FileLoader for RootDatabase { | |||
65 | } | 76 | } |
66 | 77 | ||
67 | impl salsa::Database for RootDatabase { | 78 | impl salsa::Database for RootDatabase { |
68 | fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> { | ||
69 | &self.runtime | ||
70 | } | ||
71 | fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> { | ||
72 | &mut self.runtime | ||
73 | } | ||
74 | fn on_propagated_panic(&self) -> ! { | 79 | fn on_propagated_panic(&self) -> ! { |
75 | Canceled::throw() | 80 | Canceled::throw() |
76 | } | 81 | } |
77 | fn salsa_event(&self, event: impl Fn() -> salsa::Event<RootDatabase>) { | 82 | fn salsa_event(&self, event: salsa::Event) { |
78 | match event().kind { | 83 | match event.kind { |
79 | salsa::EventKind::DidValidateMemoizedValue { .. } | 84 | salsa::EventKind::DidValidateMemoizedValue { .. } |
80 | | salsa::EventKind::WillExecute { .. } => { | 85 | | salsa::EventKind::WillExecute { .. } => { |
81 | self.check_canceled(); | 86 | self.check_canceled(); |
@@ -94,7 +99,7 @@ impl Default for RootDatabase { | |||
94 | impl RootDatabase { | 99 | impl RootDatabase { |
95 | pub fn new(lru_capacity: Option<usize>) -> RootDatabase { | 100 | pub fn new(lru_capacity: Option<usize>) -> RootDatabase { |
96 | let mut db = RootDatabase { | 101 | let mut db = RootDatabase { |
97 | runtime: salsa::Runtime::default(), | 102 | storage: salsa::Storage::default(), |
98 | last_gc: crate::wasm_shims::Instant::now(), | 103 | last_gc: crate::wasm_shims::Instant::now(), |
99 | last_gc_check: crate::wasm_shims::Instant::now(), | 104 | last_gc_check: crate::wasm_shims::Instant::now(), |
100 | }; | 105 | }; |
@@ -107,16 +112,16 @@ impl RootDatabase { | |||
107 | 112 | ||
108 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { | 113 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { |
109 | let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP); | 114 | let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP); |
110 | self.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_capacity); | 115 | ra_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
111 | self.query_mut(hir::db::ParseMacroQuery).set_lru_capacity(lru_capacity); | 116 | hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
112 | self.query_mut(hir::db::MacroExpandQuery).set_lru_capacity(lru_capacity); | 117 | hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
113 | } | 118 | } |
114 | } | 119 | } |
115 | 120 | ||
116 | impl salsa::ParallelDatabase for RootDatabase { | 121 | impl salsa::ParallelDatabase for RootDatabase { |
117 | fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { | 122 | fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { |
118 | salsa::Snapshot::new(RootDatabase { | 123 | salsa::Snapshot::new(RootDatabase { |
119 | runtime: self.runtime.snapshot(self), | 124 | storage: self.storage.snapshot(), |
120 | last_gc: self.last_gc, | 125 | last_gc: self.last_gc, |
121 | last_gc_check: self.last_gc_check, | 126 | last_gc_check: self.last_gc_check, |
122 | }) | 127 | }) |
@@ -128,7 +133,7 @@ pub trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { | |||
128 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; | 133 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; |
129 | } | 134 | } |
130 | 135 | ||
131 | fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> { | 136 | fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> { |
132 | let text = db.file_text(file_id); | 137 | let text = db.file_text(file_id); |
133 | Arc::new(LineIndex::new(&*text)) | 138 | Arc::new(LineIndex::new(&*text)) |
134 | } | 139 | } |
diff --git a/crates/ra_ide_db/src/line_index.rs b/crates/ra_ide_db/src/line_index.rs index c7c744fce..2ab662098 100644 --- a/crates/ra_ide_db/src/line_index.rs +++ b/crates/ra_ide_db/src/line_index.rs | |||
@@ -4,7 +4,7 @@ use std::iter; | |||
4 | 4 | ||
5 | use ra_syntax::{TextRange, TextSize}; | 5 | use ra_syntax::{TextRange, TextSize}; |
6 | use rustc_hash::FxHashMap; | 6 | use rustc_hash::FxHashMap; |
7 | use superslice::Ext; | 7 | use stdx::partition_point; |
8 | 8 | ||
9 | #[derive(Clone, Debug, PartialEq, Eq)] | 9 | #[derive(Clone, Debug, PartialEq, Eq)] |
10 | pub struct LineIndex { | 10 | pub struct LineIndex { |
@@ -89,7 +89,7 @@ impl LineIndex { | |||
89 | } | 89 | } |
90 | 90 | ||
91 | pub fn line_col(&self, offset: TextSize) -> LineCol { | 91 | pub fn line_col(&self, offset: TextSize) -> LineCol { |
92 | let line = self.newlines.upper_bound(&offset) - 1; | 92 | let line = partition_point(&self.newlines, |&it| it <= offset) - 1; |
93 | let line_start_offset = self.newlines[line]; | 93 | let line_start_offset = self.newlines[line]; |
94 | let col = offset - line_start_offset; | 94 | let col = offset - line_start_offset; |
95 | 95 | ||
@@ -103,8 +103,8 @@ impl LineIndex { | |||
103 | } | 103 | } |
104 | 104 | ||
105 | pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ { | 105 | pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ { |
106 | let lo = self.newlines.lower_bound(&range.start()); | 106 | let lo = partition_point(&self.newlines, |&it| it < range.start()); |
107 | let hi = self.newlines.upper_bound(&range.end()); | 107 | let hi = partition_point(&self.newlines, |&it| it <= range.end()); |
108 | let all = iter::once(range.start()) | 108 | let all = iter::once(range.start()) |
109 | .chain(self.newlines[lo..hi].iter().copied()) | 109 | .chain(self.newlines[lo..hi].iter().copied()) |
110 | .chain(iter::once(range.end())); | 110 | .chain(iter::once(range.end())); |
diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs index 44d5c35e6..0b862b449 100644 --- a/crates/ra_ide_db/src/search.rs +++ b/crates/ra_ide_db/src/search.rs | |||
@@ -60,6 +60,10 @@ impl SearchScope { | |||
60 | SearchScope::new(std::iter::once((file, None)).collect()) | 60 | SearchScope::new(std::iter::once((file, None)).collect()) |
61 | } | 61 | } |
62 | 62 | ||
63 | pub fn files(files: &[FileId]) -> SearchScope { | ||
64 | SearchScope::new(files.iter().map(|f| (*f, None)).collect()) | ||
65 | } | ||
66 | |||
63 | pub fn intersection(&self, other: &SearchScope) -> SearchScope { | 67 | pub fn intersection(&self, other: &SearchScope) -> SearchScope { |
64 | let (mut small, mut large) = (&self.entries, &other.entries); | 68 | let (mut small, mut large) = (&self.entries, &other.entries); |
65 | if small.len() > large.len() { | 69 | if small.len() > large.len() { |
@@ -180,20 +184,20 @@ impl Definition { | |||
180 | 184 | ||
181 | pub fn find_usages( | 185 | pub fn find_usages( |
182 | &self, | 186 | &self, |
183 | db: &RootDatabase, | 187 | sema: &Semantics<RootDatabase>, |
184 | search_scope: Option<SearchScope>, | 188 | search_scope: Option<SearchScope>, |
185 | ) -> Vec<Reference> { | 189 | ) -> Vec<Reference> { |
186 | let _p = profile("Definition::find_usages"); | 190 | let _p = profile("Definition::find_usages"); |
187 | 191 | ||
188 | let search_scope = { | 192 | let search_scope = { |
189 | let base = self.search_scope(db); | 193 | let base = self.search_scope(sema.db); |
190 | match search_scope { | 194 | match search_scope { |
191 | None => base, | 195 | None => base, |
192 | Some(scope) => base.intersection(&scope), | 196 | Some(scope) => base.intersection(&scope), |
193 | } | 197 | } |
194 | }; | 198 | }; |
195 | 199 | ||
196 | let name = match self.name(db) { | 200 | let name = match self.name(sema.db) { |
197 | None => return Vec::new(), | 201 | None => return Vec::new(), |
198 | Some(it) => it.to_string(), | 202 | Some(it) => it.to_string(), |
199 | }; | 203 | }; |
@@ -202,11 +206,10 @@ impl Definition { | |||
202 | let mut refs = vec![]; | 206 | let mut refs = vec![]; |
203 | 207 | ||
204 | for (file_id, search_range) in search_scope { | 208 | for (file_id, search_range) in search_scope { |
205 | let text = db.file_text(file_id); | 209 | let text = sema.db.file_text(file_id); |
206 | let search_range = | 210 | let search_range = |
207 | search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str()))); | 211 | search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str()))); |
208 | 212 | ||
209 | let sema = Semantics::new(db); | ||
210 | let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); | 213 | let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); |
211 | 214 | ||
212 | for (idx, _) in text.match_indices(pat) { | 215 | for (idx, _) in text.match_indices(pat) { |
@@ -222,9 +225,6 @@ impl Definition { | |||
222 | continue; | 225 | continue; |
223 | }; | 226 | }; |
224 | 227 | ||
225 | // FIXME: reuse sb | ||
226 | // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 | ||
227 | |||
228 | match classify_name_ref(&sema, &name_ref) { | 228 | match classify_name_ref(&sema, &name_ref) { |
229 | Some(NameRefClass::Definition(def)) if &def == self => { | 229 | Some(NameRefClass::Definition(def)) if &def == self => { |
230 | let kind = if is_record_lit_name_ref(&name_ref) | 230 | let kind = if is_record_lit_name_ref(&name_ref) |
@@ -315,7 +315,7 @@ fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { | |||
315 | name_ref | 315 | name_ref |
316 | .syntax() | 316 | .syntax() |
317 | .ancestors() | 317 | .ancestors() |
318 | .find_map(ast::RecordLit::cast) | 318 | .find_map(ast::RecordExpr::cast) |
319 | .and_then(|l| l.path()) | 319 | .and_then(|l| l.path()) |
320 | .and_then(|p| p.segment()) | 320 | .and_then(|p| p.segment()) |
321 | .map(|p| p.name_ref().as_ref() == Some(name_ref)) | 321 | .map(|p| p.name_ref().as_ref() == Some(name_ref)) |
diff --git a/crates/ra_ide_db/src/source_change.rs b/crates/ra_ide_db/src/source_change.rs index 0bbd3c3e5..abb83f421 100644 --- a/crates/ra_ide_db/src/source_change.rs +++ b/crates/ra_ide_db/src/source_change.rs | |||
@@ -6,7 +6,7 @@ | |||
6 | use ra_db::FileId; | 6 | use ra_db::FileId; |
7 | use ra_text_edit::TextEdit; | 7 | use ra_text_edit::TextEdit; |
8 | 8 | ||
9 | #[derive(Debug, Clone)] | 9 | #[derive(Default, Debug, Clone)] |
10 | pub struct SourceChange { | 10 | pub struct SourceChange { |
11 | pub source_file_edits: Vec<SourceFileEdit>, | 11 | pub source_file_edits: Vec<SourceFileEdit>, |
12 | pub file_system_edits: Vec<FileSystemEdit>, | 12 | pub file_system_edits: Vec<FileSystemEdit>, |
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs index 5a09e7d1d..35a2c5be3 100644 --- a/crates/ra_ide_db/src/symbol_index.rs +++ b/crates/ra_ide_db/src/symbol_index.rs | |||
@@ -87,7 +87,7 @@ impl Query { | |||
87 | } | 87 | } |
88 | 88 | ||
89 | #[salsa::query_group(SymbolsDatabaseStorage)] | 89 | #[salsa::query_group(SymbolsDatabaseStorage)] |
90 | pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDatabase { | 90 | pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt { |
91 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; | 91 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; |
92 | fn library_symbols(&self) -> Arc<FxHashMap<SourceRootId, SymbolIndex>>; | 92 | fn library_symbols(&self) -> Arc<FxHashMap<SourceRootId, SymbolIndex>>; |
93 | /// The set of "local" (that is, from the current workspace) roots. | 93 | /// The set of "local" (that is, from the current workspace) roots. |
@@ -100,9 +100,7 @@ pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDa | |||
100 | fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>; | 100 | fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>; |
101 | } | 101 | } |
102 | 102 | ||
103 | fn library_symbols( | 103 | fn library_symbols(db: &dyn SymbolsDatabase) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> { |
104 | db: &(impl SymbolsDatabase + ParallelDatabase), | ||
105 | ) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> { | ||
106 | let _p = profile("library_symbols"); | 104 | let _p = profile("library_symbols"); |
107 | 105 | ||
108 | let roots = db.library_roots(); | 106 | let roots = db.library_roots(); |
@@ -123,7 +121,7 @@ fn library_symbols( | |||
123 | Arc::new(res) | 121 | Arc::new(res) |
124 | } | 122 | } |
125 | 123 | ||
126 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { | 124 | fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { |
127 | db.check_canceled(); | 125 | db.check_canceled(); |
128 | let parse = db.parse(file_id); | 126 | let parse = db.parse(file_id); |
129 | 127 | ||
@@ -346,7 +344,7 @@ impl Query { | |||
346 | } | 344 | } |
347 | 345 | ||
348 | fn is_type(kind: SyntaxKind) -> bool { | 346 | fn is_type(kind: SyntaxKind) -> bool { |
349 | matches!(kind, STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF) | 347 | matches!(kind, STRUCT | ENUM | TRAIT | TYPE_ALIAS) |
350 | } | 348 | } |
351 | 349 | ||
352 | /// The actual data that is stored in the index. It should be as compact as | 350 | /// The actual data that is stored in the index. It should be as compact as |
@@ -399,14 +397,14 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { | |||
399 | } | 397 | } |
400 | match_ast! { | 398 | match_ast! { |
401 | match node { | 399 | match node { |
402 | ast::FnDef(it) => decl(it), | 400 | ast::Fn(it) => decl(it), |
403 | ast::StructDef(it) => decl(it), | 401 | ast::Struct(it) => decl(it), |
404 | ast::EnumDef(it) => decl(it), | 402 | ast::Enum(it) => decl(it), |
405 | ast::TraitDef(it) => decl(it), | 403 | ast::Trait(it) => decl(it), |
406 | ast::Module(it) => decl(it), | 404 | ast::Module(it) => decl(it), |
407 | ast::TypeAliasDef(it) => decl(it), | 405 | ast::TypeAlias(it) => decl(it), |
408 | ast::ConstDef(it) => decl(it), | 406 | ast::Const(it) => decl(it), |
409 | ast::StaticDef(it) => decl(it), | 407 | ast::Static(it) => decl(it), |
410 | ast::MacroCall(it) => { | 408 | ast::MacroCall(it) => { |
411 | if it.is_macro_rules().is_some() { | 409 | if it.is_macro_rules().is_some() { |
412 | decl(it) | 410 | decl(it) |