Diffstat (limited to 'crates')

 crates/ra_analysis/Cargo.toml          |  1
 crates/ra_analysis/src/symbol_index.rs | 56
 crates/ra_analysis/tests/test/main.rs  | 19
 crates/ra_hir/src/ids.rs               |  7
 crates/ra_hir/src/module/nameres.rs    |  8
 crates/ra_hir/src/source_binder.rs     | 39

 6 files changed, 102 insertions(+), 28 deletions(-)
diff --git a/crates/ra_analysis/Cargo.toml b/crates/ra_analysis/Cargo.toml
index c0174cdc5..7a4fdaed9 100644
--- a/crates/ra_analysis/Cargo.toml
+++ b/crates/ra_analysis/Cargo.toml
@@ -13,6 +13,7 @@ fst = "0.3.1"
 salsa = "0.9.0"
 rustc-hash = "1.0"
 parking_lot = "0.7.0"
+unicase = "2.2.0"
 
 ra_syntax = { path = "../ra_syntax" }
 ra_editor = { path = "../ra_editor" }
diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs
index b355b14ed..e2b1c88fe 100644
--- a/crates/ra_analysis/src/symbol_index.rs
+++ b/crates/ra_analysis/src/symbol_index.rs
@@ -20,6 +20,7 @@
 //! file in the current workspace, and run a query aginst the union of all
 //! thouse fsts.
 use std::{
+    cmp::Ordering,
     hash::{Hash, Hasher},
     sync::Arc,
 };
@@ -27,11 +28,11 @@ use std::{
 use fst::{self, Streamer};
 use ra_syntax::{
     SyntaxNodeRef, SourceFileNode, SmolStr,
-    algo::visit::{visitor, Visitor},
+    algo::{visit::{visitor, Visitor}, find_covering_node},
     SyntaxKind::{self, *},
     ast::{self, NameOwner},
 };
-use ra_db::{SyntaxDatabase, SourceRootId, FilesDatabase, LocalSyntaxPtr};
+use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr};
 use salsa::ParallelDatabase;
 use rayon::prelude::*;
 
@@ -41,7 +42,7 @@ use crate::{
 };
 
 salsa::query_group! {
-    pub(crate) trait SymbolsDatabase: SyntaxDatabase {
+    pub(crate) trait SymbolsDatabase: hir::db::HirDatabase {
         fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
             type FileSymbolsQuery;
         }
@@ -52,10 +53,23 @@ salsa::query_group! {
     }
 }
 
-fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
+fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
     db.check_canceled()?;
-    let syntax = db.source_file(file_id);
-    Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
+    let source_file = db.source_file(file_id);
+    let mut symbols = source_file
+        .syntax()
+        .descendants()
+        .filter_map(to_symbol)
+        .map(move |(name, ptr)| FileSymbol { name, ptr, file_id })
+        .collect::<Vec<_>>();
+
+    for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? {
+        let node = find_covering_node(source_file.syntax(), text_range);
+        let ptr = LocalSyntaxPtr::new(node);
+        symbols.push(FileSymbol { file_id, name, ptr })
+    }
+
+    Ok(Arc::new(SymbolIndex::new(symbols)))
 }
 
 pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable<Vec<FileSymbol>> {
@@ -111,6 +125,17 @@ impl Hash for SymbolIndex {
 }
 
 impl SymbolIndex {
+    fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
+        fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering {
+            unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str()))
+        }
+        symbols.par_sort_by(cmp);
+        symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal);
+        let names = symbols.iter().map(|it| it.name.as_str().to_lowercase());
+        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
+        SymbolIndex { symbols, map }
+    }
+
     pub(crate) fn len(&self) -> usize {
         self.symbols.len()
     }
@@ -118,29 +143,16 @@ impl SymbolIndex {
     pub(crate) fn for_files(
         files: impl ParallelIterator<Item = (FileId, SourceFileNode)>,
     ) -> SymbolIndex {
-        let mut symbols = files
+        let symbols = files
             .flat_map(|(file_id, file)| {
                 file.syntax()
                     .descendants()
                     .filter_map(to_symbol)
-                    .map(move |(name, ptr)| {
-                        (
-                            name.as_str().to_lowercase(),
-                            FileSymbol { name, ptr, file_id },
-                        )
-                    })
+                    .map(move |(name, ptr)| FileSymbol { name, ptr, file_id })
                     .collect::<Vec<_>>()
             })
             .collect::<Vec<_>>();
-        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
-        symbols.dedup_by(|s1, s2| s1.0 == s2.0);
-        let (names, symbols): (Vec<String>, Vec<FileSymbol>) = symbols.into_iter().unzip();
-        let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
-        SymbolIndex { symbols, map }
-    }
-
-    pub(crate) fn for_file(file_id: FileId, file: SourceFileNode) -> SymbolIndex {
-        SymbolIndex::for_files(rayon::iter::once((file_id, file)))
+        SymbolIndex::new(symbols)
     }
 }
 
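Note on the SymbolIndex::new added above: fst::Map::from_iter only accepts keys in sorted order, so the symbols are first sorted and deduplicated case-insensitively (unicase::Ascii gives ASCII case-insensitive ordering) and the lowercased names then serve as the map keys. Below is a minimal standalone sketch of that idea, not code from this commit; the helper name build_index and the crate versions are assumptions.

// Sketch only: case-insensitive sort + dedup, then an fst::Map keyed by
// lowercased names (fst requires keys in lexicographic order).
// Assumed dependencies: fst = "0.3", unicase = "2.2".
use std::cmp::Ordering;

fn build_index(mut names: Vec<String>) -> fst::Map {
    fn cmp(a: &str, b: &str) -> Ordering {
        unicase::Ascii::new(a).cmp(&unicase::Ascii::new(b))
    }
    names.sort_by(|a, b| cmp(a, b));
    names.dedup_by(|a, b| cmp(a, b) == Ordering::Equal);
    let keys = names.iter().map(|n| n.to_lowercase());
    fst::Map::from_iter(keys.zip(0u64..)).unwrap()
}

fn main() {
    let map = build_index(vec!["HirDatabase".into(), "hirdatabase".into(), "FileId".into()]);
    assert_eq!(map.len(), 2); // case-insensitive duplicates collapse into one key
}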
diff --git a/crates/ra_analysis/tests/test/main.rs b/crates/ra_analysis/tests/test/main.rs
index 23a5799b9..bfdf8aef2 100644
--- a/crates/ra_analysis/tests/test/main.rs
+++ b/crates/ra_analysis/tests/test/main.rs
@@ -6,7 +6,7 @@ use test_utils::{assert_eq_dbg, assert_eq_text};
 
 use ra_analysis::{
     mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis},
-    AnalysisChange, CrateGraph, FileId, FnSignatureInfo,
+    AnalysisChange, CrateGraph, FileId, FnSignatureInfo, Query
 };
 
 fn get_signature(text: &str) -> (FnSignatureInfo, Option<usize>) {
@@ -531,6 +531,7 @@ fn test_rename_for_mut_param() {
     }"#,
     );
 }
+
 fn test_rename(text: &str, new_name: &str, expected: &str) {
     let (analysis, position) = single_file_with_position(text);
     let edits = analysis.rename(position, new_name).unwrap();
@@ -547,3 +548,19 @@ fn test_rename(text: &str, new_name: &str, expected: &str) {
         .apply(&*analysis.file_text(file_id.unwrap()));
     assert_eq_text!(expected, &*result);
 }
+
+#[test]
+fn world_symbols_include_stuff_from_macros() {
+    let (analysis, _) = single_file(
+        "
+salsa::query_group! {
+pub trait HirDatabase: SyntaxDatabase {}
+}
+",
+    );
+
+    let mut symbols = analysis.symbol_search(Query::new("Hir".into())).unwrap();
+    let s = symbols.pop().unwrap();
+    assert_eq!(s.name(), "HirDatabase");
+    assert_eq!(s.range(), TextRange::from_to(33.into(), 44.into()));
+}
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index a09dee8b1..4c7ebe3ea 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -48,6 +48,13 @@ impl HirFileId {
         }
     }
 
+    pub(crate) fn as_macro_call_id(self) -> Option<MacroCallId> {
+        match self.0 {
+            HirFileIdRepr::Macro(it) => Some(it),
+            _ => None,
+        }
+    }
+
     pub(crate) fn hir_source_file(db: &impl HirDatabase, file_id: HirFileId) -> SourceFileNode {
         match file_id.0 {
             HirFileIdRepr::File(file_id) => db.source_file(file_id),
diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs
index 40aa33ffa..8d1209626 100644
--- a/crates/ra_hir/src/module/nameres.rs
+++ b/crates/ra_hir/src/module/nameres.rs
@@ -64,14 +64,14 @@ impl ModuleScope {
 /// running name resolution.
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct InputModuleItems {
-    items: Vec<ModuleItem>,
+    pub(crate) items: Vec<ModuleItem>,
     imports: Vec<Import>,
 }
 
 #[derive(Debug, PartialEq, Eq)]
-struct ModuleItem {
-    id: SourceItemId,
-    name: Name,
+pub(crate) struct ModuleItem {
+    pub(crate) id: SourceItemId,
+    pub(crate) name: Name,
     kind: SyntaxKind,
     vis: Vis,
 }
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 24490d119..85bd84469 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -8,8 +8,8 @@
 use ra_db::{FileId, FilePosition, Cancelable};
 use ra_editor::find_node_at_offset;
 use ra_syntax::{
+    SmolStr, TextRange, SyntaxNodeRef,
     ast::{self, AstNode, NameOwner},
-    SyntaxNodeRef,
 };
 
 use crate::{
@@ -126,3 +126,40 @@ pub fn function_from_child_node(
     let fn_def = ctry!(node.ancestors().find_map(ast::FnDef::cast));
     function_from_source(db, file_id, fn_def)
 }
+
+pub fn macro_symbols(
+    db: &impl HirDatabase,
+    file_id: FileId,
+) -> Cancelable<Vec<(SmolStr, TextRange)>> {
+    let module = match module_from_file_id(db, file_id)? {
+        Some(it) => it,
+        None => return Ok(Vec::new()),
+    };
+    let items = db.input_module_items(module.source_root_id, module.module_id)?;
+    let mut res = Vec::new();
+
+    for macro_call_id in items
+        .items
+        .iter()
+        .filter_map(|it| it.id.file_id.as_macro_call_id())
+    {
+        if let Some(exp) = db.expand_macro_invocation(macro_call_id) {
+            let loc = macro_call_id.loc(db);
+            let syntax = db.file_item(loc.source_item_id);
+            let syntax = syntax.borrowed();
+            let macro_call = ast::MacroCall::cast(syntax).unwrap();
+            let off = macro_call.token_tree().unwrap().syntax().range().start();
+            let file = exp.file();
+            for trait_def in file.syntax().descendants().filter_map(ast::TraitDef::cast) {
+                if let Some(name) = trait_def.name() {
+                    let dst_range = name.syntax().range();
+                    if let Some(src_range) = exp.map_range_back(dst_range) {
+                        res.push((name.text(), src_range + off))
+                    }
+                }
+            }
+        }
+    }
+
+    Ok(res)
+}
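The interesting step in macro_symbols above is the range re-basing: exp.map_range_back translates a range in the macro-expanded text back into a range relative to the macro call's token tree, and adding off (the start of that token tree in the original file) turns it into file coordinates, which is what file_symbols and the new test consume. Below is a minimal sketch of that arithmetic only, not code from this commit; Range is a simplified stand-in for ra_syntax's TextRange and the numbers are made up for illustration.

// Sketch only: simplified stand-in for TextRange to illustrate `src_range + off`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Range {
    start: u32,
    end: u32,
}

impl Range {
    // Shifting both endpoints by the token tree's start offset turns a
    // range relative to the macro input into a file-level range.
    fn shift(self, off: u32) -> Range {
        Range { start: self.start + off, end: self.end + off }
    }
}

fn main() {
    // Hypothetical numbers: the macro's token tree starts at offset 20 in
    // the file, and the mapped-back name range inside that input is 5..16.
    let src_range = Range { start: 5, end: 16 };
    assert_eq!(src_range.shift(20), Range { start: 25, end: 36 });
}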