Diffstat (limited to 'crates/ra_analysis/src')
-rw-r--r--   crates/ra_analysis/src/completion.rs                    |  19
-rw-r--r--   crates/ra_analysis/src/completion/complete_keyword.rs   |  67
-rw-r--r--   crates/ra_analysis/src/completion/complete_scope.rs     |   2
-rw-r--r--   crates/ra_analysis/src/db.rs                            |  16
-rw-r--r--   crates/ra_analysis/src/extend_selection.rs              |  29
-rw-r--r--   crates/ra_analysis/src/imp.rs                           | 462
-rw-r--r--   crates/ra_analysis/src/lib.rs                           | 222
-rw-r--r--   crates/ra_analysis/src/macros.rs                        |  75
-rw-r--r--   crates/ra_analysis/src/runnables.rs                     |  86
-rw-r--r--   crates/ra_analysis/src/symbol_index.rs                  | 155
-rw-r--r--   crates/ra_analysis/src/syntax_highlighting.rs           |  42
11 files changed, 743 insertions, 432 deletions
diff --git a/crates/ra_analysis/src/completion.rs b/crates/ra_analysis/src/completion.rs
index fe580700f..ce777a771 100644
--- a/crates/ra_analysis/src/completion.rs
+++ b/crates/ra_analysis/src/completion.rs
@@ -28,7 +28,21 @@ pub use crate::completion::completion_item::{CompletionItem, InsertText, Complet | |||
28 | /// incomplete and can look really weird. | 28 | /// incomplete and can look really weird. |
29 | /// | 29 | /// |
30 | /// Once the context is collected, we run a series of completion routines which | 30 | /// Once the context is collected, we run a series of completion routines which |
31 | /// look at the context and produce completion items. | 31 | /// look at the context and produce completion items. One subtlety about this |
32 | /// phase is that the completion engine should not filter by the substring which is | ||
33 | /// already present, it should give all possible variants for the identifier at | ||
34 | /// the caret. In other words, for | ||
35 | /// | ||
36 | /// ```no-run | ||
37 | /// fn f() { | ||
38 | /// let foo = 92; | ||
39 | /// let _ = bar<|> | ||
40 | /// } | ||
41 | /// ``` | ||
42 | /// | ||
43 | /// `foo` *should* be present among the completion variants. Filtering by | ||
44 | /// identifier prefix/fuzzy match should be done higher in the stack, together | ||
45 | /// with ordering of completions (currently this is done by the client). | ||
32 | pub(crate) fn completions( | 46 | pub(crate) fn completions( |
33 | db: &db::RootDatabase, | 47 | db: &db::RootDatabase, |
34 | position: FilePosition, | 48 | position: FilePosition, |
@@ -40,6 +54,7 @@ pub(crate) fn completions( | |||
40 | 54 | ||
41 | complete_fn_param::complete_fn_param(&mut acc, &ctx); | 55 | complete_fn_param::complete_fn_param(&mut acc, &ctx); |
42 | complete_keyword::complete_expr_keyword(&mut acc, &ctx); | 56 | complete_keyword::complete_expr_keyword(&mut acc, &ctx); |
57 | complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); | ||
43 | complete_snippet::complete_expr_snippet(&mut acc, &ctx); | 58 | complete_snippet::complete_expr_snippet(&mut acc, &ctx); |
44 | complete_snippet::complete_item_snippet(&mut acc, &ctx); | 59 | complete_snippet::complete_item_snippet(&mut acc, &ctx); |
45 | complete_path::complete_path(&mut acc, &ctx)?; | 60 | complete_path::complete_path(&mut acc, &ctx)?; |
@@ -57,6 +72,6 @@ fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind | |||
57 | } else { | 72 | } else { |
58 | single_file_with_position(code) | 73 | single_file_with_position(code) |
59 | }; | 74 | }; |
60 | let completions = completions(&analysis.imp.db, position).unwrap().unwrap(); | 75 | let completions = completions(&analysis.db, position).unwrap().unwrap(); |
61 | completions.assert_match(expected_completions, kind); | 76 | completions.assert_match(expected_completions, kind); |
62 | } | 77 | } |
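As the new doc comment explains, the completion routines should emit every name visible at the caret and leave prefix filtering and ranking to a higher layer (currently the client). A minimal sketch of that division of labour, using hypothetical names rather than ra_analysis types:

```rust
// Illustrative only: `Candidate`, `all_candidates_in_scope` and `rank_and_filter`
// are made-up names. The point is the split described in the doc comment above:
// the engine returns everything in scope, the caller filters by what was typed.
#[derive(Debug, Clone)]
struct Candidate {
    label: String,
}

/// The "engine": returns all names visible at the caret, with no filtering.
fn all_candidates_in_scope() -> Vec<Candidate> {
    vec![
        Candidate { label: "foo".into() },
        Candidate { label: "format_args".into() },
        Candidate { label: "bar".into() },
    ]
}

/// The "client": filters by the prefix already typed and orders the survivors.
fn rank_and_filter(typed_prefix: &str, mut items: Vec<Candidate>) -> Vec<Candidate> {
    items.retain(|c| c.label.starts_with(typed_prefix));
    items.sort_by(|a, b| a.label.len().cmp(&b.label.len()).then(a.label.cmp(&b.label)));
    items
}

fn main() {
    let everything = all_candidates_in_scope();
    // With `fo` already typed, the engine still produced *all* variants;
    // only this last step narrows them down to `foo` and `format_args`.
    let shown = rank_and_filter("fo", everything);
    assert_eq!(shown.len(), 2);
}
```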
diff --git a/crates/ra_analysis/src/completion/complete_keyword.rs b/crates/ra_analysis/src/completion/complete_keyword.rs
index d70fdaada..28194c908 100644
--- a/crates/ra_analysis/src/completion/complete_keyword.rs
+++ b/crates/ra_analysis/src/completion/complete_keyword.rs
@@ -7,6 +7,38 @@ use ra_syntax::{ | |||
7 | 7 | ||
8 | use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; | 8 | use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; |
9 | 9 | ||
10 | pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { | ||
11 | // complete keyword "crate" in use stmt | ||
12 | match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { | ||
13 | (Some(_), None) => { | ||
14 | CompletionItem::new(CompletionKind::Keyword, "crate") | ||
15 | .kind(CompletionItemKind::Keyword) | ||
16 | .lookup_by("crate") | ||
17 | .snippet("crate::") | ||
18 | .add_to(acc); | ||
19 | CompletionItem::new(CompletionKind::Keyword, "self") | ||
20 | .kind(CompletionItemKind::Keyword) | ||
21 | .lookup_by("self") | ||
22 | .add_to(acc); | ||
23 | CompletionItem::new(CompletionKind::Keyword, "super") | ||
24 | .kind(CompletionItemKind::Keyword) | ||
25 | .lookup_by("super") | ||
26 | .add_to(acc); | ||
27 | } | ||
28 | (Some(_), Some(_)) => { | ||
29 | CompletionItem::new(CompletionKind::Keyword, "self") | ||
30 | .kind(CompletionItemKind::Keyword) | ||
31 | .lookup_by("self") | ||
32 | .add_to(acc); | ||
33 | CompletionItem::new(CompletionKind::Keyword, "super") | ||
34 | .kind(CompletionItemKind::Keyword) | ||
35 | .lookup_by("super") | ||
36 | .add_to(acc); | ||
37 | } | ||
38 | _ => {} | ||
39 | } | ||
40 | } | ||
41 | |||
10 | fn keyword(kw: &str, snippet: &str) -> CompletionItem { | 42 | fn keyword(kw: &str, snippet: &str) -> CompletionItem { |
11 | CompletionItem::new(CompletionKind::Keyword, kw) | 43 | CompletionItem::new(CompletionKind::Keyword, kw) |
12 | .kind(CompletionItemKind::Keyword) | 44 | .kind(CompletionItemKind::Keyword) |
@@ -18,6 +50,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte | |||
18 | if !ctx.is_trivial_path { | 50 | if !ctx.is_trivial_path { |
19 | return; | 51 | return; |
20 | } | 52 | } |
53 | |||
21 | let fn_def = match ctx.function_syntax { | 54 | let fn_def = match ctx.function_syntax { |
22 | Some(it) => it, | 55 | Some(it) => it, |
23 | None => return, | 56 | None => return, |
@@ -80,6 +113,40 @@ mod tests { | |||
80 | } | 113 | } |
81 | 114 | ||
82 | #[test] | 115 | #[test] |
116 | fn completes_keywords_in_use_stmt() { | ||
117 | check_keyword_completion( | ||
118 | r" | ||
119 | use <|> | ||
120 | ", | ||
121 | r#" | ||
122 | crate "crate" "crate::" | ||
123 | self "self" | ||
124 | super "super" | ||
125 | "#, | ||
126 | ); | ||
127 | |||
128 | check_keyword_completion( | ||
129 | r" | ||
130 | use a::<|> | ||
131 | ", | ||
132 | r#" | ||
133 | self "self" | ||
134 | super "super" | ||
135 | "#, | ||
136 | ); | ||
137 | |||
138 | check_keyword_completion( | ||
139 | r" | ||
140 | use a::{b, <|>} | ||
141 | ", | ||
142 | r#" | ||
143 | self "self" | ||
144 | super "super" | ||
145 | "#, | ||
146 | ); | ||
147 | } | ||
148 | |||
149 | #[test] | ||
83 | fn completes_various_keywords_in_function() { | 150 | fn completes_various_keywords_in_function() { |
84 | check_keyword_completion( | 151 | check_keyword_completion( |
85 | r" | 152 | r" |
diff --git a/crates/ra_analysis/src/completion/complete_scope.rs b/crates/ra_analysis/src/completion/complete_scope.rs
index daf666505..4dead3689 100644
--- a/crates/ra_analysis/src/completion/complete_scope.rs
+++ b/crates/ra_analysis/src/completion/complete_scope.rs
@@ -27,7 +27,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) -> | |||
27 | match res.import { | 27 | match res.import { |
28 | None => true, | 28 | None => true, |
29 | Some(import) => { | 29 | Some(import) => { |
30 | let range = import.range(ctx.db, module.source().file_id()); | 30 | let range = import.range(ctx.db, module.file_id()); |
31 | !range.is_subrange(&ctx.leaf.range()) | 31 | !range.is_subrange(&ctx.leaf.range()) |
32 | } | 32 | } |
33 | } | 33 | } |
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
index b072a5eba..d7740f0c4 100644
--- a/crates/ra_analysis/src/db.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -1,7 +1,6 @@ | |||
1 | use std::{fmt, sync::Arc}; | 1 | use std::{fmt, sync::Arc}; |
2 | use salsa::{self, Database}; | 2 | use salsa::{self, Database}; |
3 | use ra_db::{LocationIntener, BaseDatabase}; | 3 | use ra_db::{LocationIntener, BaseDatabase}; |
4 | use hir::{self, DefId, DefLoc}; | ||
5 | 4 | ||
6 | use crate::{ | 5 | use crate::{ |
7 | symbol_index, | 6 | symbol_index, |
@@ -15,7 +14,8 @@ pub(crate) struct RootDatabase { | |||
15 | 14 | ||
16 | #[derive(Default)] | 15 | #[derive(Default)] |
17 | struct IdMaps { | 16 | struct IdMaps { |
18 | defs: LocationIntener<DefLoc, DefId>, | 17 | defs: LocationIntener<hir::DefLoc, hir::DefId>, |
18 | macros: LocationIntener<hir::MacroCallLoc, hir::MacroCallId>, | ||
19 | } | 19 | } |
20 | 20 | ||
21 | impl fmt::Debug for IdMaps { | 21 | impl fmt::Debug for IdMaps { |
@@ -59,12 +59,18 @@ impl salsa::ParallelDatabase for RootDatabase { | |||
59 | 59 | ||
60 | impl BaseDatabase for RootDatabase {} | 60 | impl BaseDatabase for RootDatabase {} |
61 | 61 | ||
62 | impl AsRef<LocationIntener<DefLoc, DefId>> for RootDatabase { | 62 | impl AsRef<LocationIntener<hir::DefLoc, hir::DefId>> for RootDatabase { |
63 | fn as_ref(&self) -> &LocationIntener<DefLoc, DefId> { | 63 | fn as_ref(&self) -> &LocationIntener<hir::DefLoc, hir::DefId> { |
64 | &self.id_maps.defs | 64 | &self.id_maps.defs |
65 | } | 65 | } |
66 | } | 66 | } |
67 | 67 | ||
68 | impl AsRef<LocationIntener<hir::MacroCallLoc, hir::MacroCallId>> for RootDatabase { | ||
69 | fn as_ref(&self) -> &LocationIntener<hir::MacroCallLoc, hir::MacroCallId> { | ||
70 | &self.id_maps.macros | ||
71 | } | ||
72 | } | ||
73 | |||
68 | salsa::database_storage! { | 74 | salsa::database_storage! { |
69 | pub(crate) struct RootDatabaseStorage for RootDatabase { | 75 | pub(crate) struct RootDatabaseStorage for RootDatabase { |
70 | impl ra_db::FilesDatabase { | 76 | impl ra_db::FilesDatabase { |
@@ -85,6 +91,8 @@ salsa::database_storage! { | |||
85 | fn library_symbols() for symbol_index::LibrarySymbolsQuery; | 91 | fn library_symbols() for symbol_index::LibrarySymbolsQuery; |
86 | } | 92 | } |
87 | impl hir::db::HirDatabase { | 93 | impl hir::db::HirDatabase { |
94 | fn hir_source_file() for hir::db::HirSourceFileQuery; | ||
95 | fn expand_macro_invocation() for hir::db::ExpandMacroCallQuery; | ||
88 | fn module_tree() for hir::db::ModuleTreeQuery; | 96 | fn module_tree() for hir::db::ModuleTreeQuery; |
89 | fn fn_scopes() for hir::db::FnScopesQuery; | 97 | fn fn_scopes() for hir::db::FnScopesQuery; |
90 | fn file_items() for hir::db::SourceFileItemsQuery; | 98 | fn file_items() for hir::db::SourceFileItemsQuery; |
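`db.rs` now holds two interner maps (`defs` and `macros`) and exposes each through its own `AsRef` impl, so generic code can ask for "the interner for this id type" without naming the concrete database. The sketch below shows the interning idea itself with a simplified, hypothetical `Interner`; it is not ra_db's `LocationIntener`:

```rust
use std::collections::HashMap;
use std::hash::Hash;

// Simplified stand-in for the interning pattern: map a structured "location"
// to a small, copyable id and back. The real `LocationIntener` is additionally
// thread-safe and integrated with salsa.
struct Interner<Loc: Clone + Eq + Hash> {
    forward: HashMap<Loc, u32>,
    backward: Vec<Loc>,
}

impl<Loc: Clone + Eq + Hash> Interner<Loc> {
    fn new() -> Self {
        Interner { forward: HashMap::new(), backward: Vec::new() }
    }
    /// Returns the existing id for `loc`, or allocates a fresh one.
    fn intern(&mut self, loc: Loc) -> u32 {
        if let Some(&id) = self.forward.get(&loc) {
            return id;
        }
        let id = self.backward.len() as u32;
        self.forward.insert(loc.clone(), id);
        self.backward.push(loc);
        id
    }
    /// Recovers the location an id was created from.
    fn lookup(&self, id: u32) -> &Loc {
        &self.backward[id as usize]
    }
}

fn main() {
    // Hypothetical "macro call location": (file id, offset of the call).
    let mut macros: Interner<(u32, u32)> = Interner::new();
    let id = macros.intern((1, 92));
    assert_eq!(id, macros.intern((1, 92))); // same location, same id
    assert_eq!(*macros.lookup(id), (1, 92));
}
```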
diff --git a/crates/ra_analysis/src/extend_selection.rs b/crates/ra_analysis/src/extend_selection.rs
index cde6ee101..f1b77f981 100644
--- a/crates/ra_analysis/src/extend_selection.rs
+++ b/crates/ra_analysis/src/extend_selection.rs
@@ -1,6 +1,6 @@ | |||
1 | use ra_db::SyntaxDatabase; | 1 | use ra_db::SyntaxDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | SyntaxNodeRef, AstNode, | 3 | SyntaxNodeRef, AstNode, SourceFileNode, |
4 | ast, algo::find_covering_node, | 4 | ast, algo::find_covering_node, |
5 | }; | 5 | }; |
6 | 6 | ||
@@ -11,18 +11,23 @@ use crate::{ | |||
11 | 11 | ||
12 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | 12 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { |
13 | let source_file = db.source_file(frange.file_id); | 13 | let source_file = db.source_file(frange.file_id); |
14 | if let Some(macro_call) = find_macro_call(source_file.syntax(), frange.range) { | 14 | if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { |
15 | if let Some(exp) = crate::macros::expand(db, frange.file_id, macro_call) { | 15 | return range; |
16 | if let Some(dst_range) = exp.map_range_forward(frange.range) { | ||
17 | if let Some(dst_range) = ra_editor::extend_selection(exp.source_file(), dst_range) { | ||
18 | if let Some(src_range) = exp.map_range_back(dst_range) { | ||
19 | return src_range; | ||
20 | } | ||
21 | } | ||
22 | } | ||
23 | } | ||
24 | } | 16 | } |
25 | ra_editor::extend_selection(&source_file, frange.range).unwrap_or(frange.range) | 17 | ra_editor::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) |
18 | } | ||
19 | |||
20 | fn extend_selection_in_macro( | ||
21 | _db: &RootDatabase, | ||
22 | source_file: &SourceFileNode, | ||
23 | frange: FileRange, | ||
24 | ) -> Option<TextRange> { | ||
25 | let macro_call = find_macro_call(source_file.syntax(), frange.range)?; | ||
26 | let (off, exp) = hir::MacroDef::ast_expand(macro_call)?; | ||
27 | let dst_range = exp.map_range_forward(frange.range - off)?; | ||
28 | let dst_range = ra_editor::extend_selection(exp.syntax().borrowed(), dst_range)?; | ||
29 | let src_range = exp.map_range_back(dst_range)? + off; | ||
30 | Some(src_range) | ||
26 | } | 31 | } |
27 | 32 | ||
28 | fn find_macro_call(node: SyntaxNodeRef, range: TextRange) -> Option<ast::MacroCall> { | 33 | fn find_macro_call(node: SyntaxNodeRef, range: TextRange) -> Option<ast::MacroCall> { |
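The rewritten `extend_selection_in_macro` maps the user's selection into the expansion's coordinates (subtracting the call's offset), extends the selection there, and maps the result back (adding the offset again). A minimal sketch of that offset round-trip with made-up ranges, standing in for the real `MacroDef`/`ra_editor` calls:

```rust
// Illustrative offset bookkeeping only; the expansion mapping and the editor's
// extend-selection step are replaced by plain integer ranges.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Range { start: u32, end: u32 }

impl Range {
    fn shift_left(self, off: u32) -> Range { Range { start: self.start - off, end: self.end - off } }
    fn shift_right(self, off: u32) -> Range { Range { start: self.start + off, end: self.end + off } }
}

fn main() {
    // Say the macro call's token tree starts at offset 40 in the file,
    // and the user's selection is 50..52 inside it.
    let off = 40;
    let selection_in_file = Range { start: 50, end: 52 };

    // 1. Map the selection into the expansion's coordinate space.
    let in_expansion = selection_in_file.shift_left(off);

    // 2. Pretend the editor extended the selection to a bigger node there.
    let extended_in_expansion = Range { start: in_expansion.start - 3, end: in_expansion.end + 5 };

    // 3. Map the result back into file coordinates, mirroring
    //    `map_range_back(..)? + off` in the function above.
    let extended_in_file = extended_in_expansion.shift_right(off);
    assert_eq!(extended_in_file, Range { start: 47, end: 57 });
}
```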
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index 5ed374c79..eae73c2c4 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -1,59 +1,42 @@ | |||
1 | use std::{ | 1 | use std::sync::Arc; |
2 | fmt, | ||
3 | sync::Arc, | ||
4 | }; | ||
5 | 2 | ||
6 | use rayon::prelude::*; | 3 | use salsa::Database; |
7 | use salsa::{Database, ParallelDatabase}; | ||
8 | 4 | ||
9 | use hir::{ | 5 | use hir::{ |
10 | self, FnSignatureInfo, Problem, source_binder, | 6 | self, FnSignatureInfo, Problem, source_binder, |
11 | }; | 7 | }; |
12 | use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; | 8 | use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; |
13 | use ra_editor::{self, FileSymbol, find_node_at_offset, LineIndex, LocalEdit, Severity}; | 9 | use ra_editor::{self, find_node_at_offset, assists, LocalEdit, Severity}; |
14 | use ra_syntax::{ | 10 | use ra_syntax::{ |
15 | algo::find_covering_node, | 11 | algo::{find_covering_node, visit::{visitor, Visitor}}, |
16 | ast::{self, ArgListOwner, Expr, FnDef, NameOwner}, | 12 | ast::{self, ArgListOwner, Expr, FnDef, NameOwner}, |
17 | AstNode, SourceFileNode, | 13 | AstNode, SourceFileNode, |
18 | SyntaxKind::*, | 14 | SyntaxKind::*, |
19 | SyntaxNodeRef, TextRange, TextUnit, | 15 | SyntaxNode, SyntaxNodeRef, TextRange, TextUnit, |
20 | }; | 16 | }; |
21 | 17 | ||
22 | use crate::{ | 18 | use crate::{ |
23 | AnalysisChange, | 19 | AnalysisChange, |
24 | Cancelable, | 20 | Cancelable, NavigationTarget, |
25 | completion::{CompletionItem, completions}, | ||
26 | CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit, | 21 | CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit, |
27 | Query, ReferenceResolution, RootChange, SourceChange, SourceFileEdit, | 22 | Query, ReferenceResolution, RootChange, SourceChange, SourceFileEdit, |
28 | symbol_index::{LibrarySymbolsQuery, SymbolIndex, SymbolsDatabase}, | 23 | symbol_index::{LibrarySymbolsQuery, FileSymbol}, |
29 | }; | 24 | }; |
30 | 25 | ||
31 | #[derive(Debug, Default)] | 26 | impl db::RootDatabase { |
32 | pub(crate) struct AnalysisHostImpl { | 27 | pub(crate) fn apply_change(&mut self, change: AnalysisChange) { |
33 | db: db::RootDatabase, | ||
34 | } | ||
35 | |||
36 | impl AnalysisHostImpl { | ||
37 | pub fn analysis(&self) -> AnalysisImpl { | ||
38 | AnalysisImpl { | ||
39 | db: self.db.snapshot(), | ||
40 | } | ||
41 | } | ||
42 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
43 | log::info!("apply_change {:?}", change); | 28 | log::info!("apply_change {:?}", change); |
44 | // self.gc_syntax_trees(); | 29 | // self.gc_syntax_trees(); |
45 | if !change.new_roots.is_empty() { | 30 | if !change.new_roots.is_empty() { |
46 | let mut local_roots = Vec::clone(&self.db.local_roots()); | 31 | let mut local_roots = Vec::clone(&self.local_roots()); |
47 | for (root_id, is_local) in change.new_roots { | 32 | for (root_id, is_local) in change.new_roots { |
48 | self.db | 33 | self.query_mut(ra_db::SourceRootQuery) |
49 | .query_mut(ra_db::SourceRootQuery) | ||
50 | .set(root_id, Default::default()); | 34 | .set(root_id, Default::default()); |
51 | if is_local { | 35 | if is_local { |
52 | local_roots.push(root_id); | 36 | local_roots.push(root_id); |
53 | } | 37 | } |
54 | } | 38 | } |
55 | self.db | 39 | self.query_mut(ra_db::LocalRootsQuery) |
56 | .query_mut(ra_db::LocalRootsQuery) | ||
57 | .set((), Arc::new(local_roots)); | 40 | .set((), Arc::new(local_roots)); |
58 | } | 41 | } |
59 | 42 | ||
@@ -61,53 +44,44 @@ impl AnalysisHostImpl { | |||
61 | self.apply_root_change(root_id, root_change); | 44 | self.apply_root_change(root_id, root_change); |
62 | } | 45 | } |
63 | for (file_id, text) in change.files_changed { | 46 | for (file_id, text) in change.files_changed { |
64 | self.db.query_mut(ra_db::FileTextQuery).set(file_id, text) | 47 | self.query_mut(ra_db::FileTextQuery).set(file_id, text) |
65 | } | 48 | } |
66 | if !change.libraries_added.is_empty() { | 49 | if !change.libraries_added.is_empty() { |
67 | let mut libraries = Vec::clone(&self.db.library_roots()); | 50 | let mut libraries = Vec::clone(&self.library_roots()); |
68 | for library in change.libraries_added { | 51 | for library in change.libraries_added { |
69 | libraries.push(library.root_id); | 52 | libraries.push(library.root_id); |
70 | self.db | 53 | self.query_mut(ra_db::SourceRootQuery) |
71 | .query_mut(ra_db::SourceRootQuery) | ||
72 | .set(library.root_id, Default::default()); | 54 | .set(library.root_id, Default::default()); |
73 | self.db | 55 | self.query_mut(LibrarySymbolsQuery) |
74 | .query_mut(LibrarySymbolsQuery) | ||
75 | .set_constant(library.root_id, Arc::new(library.symbol_index)); | 56 | .set_constant(library.root_id, Arc::new(library.symbol_index)); |
76 | self.apply_root_change(library.root_id, library.root_change); | 57 | self.apply_root_change(library.root_id, library.root_change); |
77 | } | 58 | } |
78 | self.db | 59 | self.query_mut(ra_db::LibraryRootsQuery) |
79 | .query_mut(ra_db::LibraryRootsQuery) | ||
80 | .set((), Arc::new(libraries)); | 60 | .set((), Arc::new(libraries)); |
81 | } | 61 | } |
82 | if let Some(crate_graph) = change.crate_graph { | 62 | if let Some(crate_graph) = change.crate_graph { |
83 | self.db | 63 | self.query_mut(ra_db::CrateGraphQuery) |
84 | .query_mut(ra_db::CrateGraphQuery) | ||
85 | .set((), Arc::new(crate_graph)) | 64 | .set((), Arc::new(crate_graph)) |
86 | } | 65 | } |
87 | } | 66 | } |
88 | 67 | ||
89 | fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { | 68 | fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { |
90 | let mut source_root = SourceRoot::clone(&self.db.source_root(root_id)); | 69 | let mut source_root = SourceRoot::clone(&self.source_root(root_id)); |
91 | for add_file in root_change.added { | 70 | for add_file in root_change.added { |
92 | self.db | 71 | self.query_mut(ra_db::FileTextQuery) |
93 | .query_mut(ra_db::FileTextQuery) | ||
94 | .set(add_file.file_id, add_file.text); | 72 | .set(add_file.file_id, add_file.text); |
95 | self.db | 73 | self.query_mut(ra_db::FileRelativePathQuery) |
96 | .query_mut(ra_db::FileRelativePathQuery) | ||
97 | .set(add_file.file_id, add_file.path.clone()); | 74 | .set(add_file.file_id, add_file.path.clone()); |
98 | self.db | 75 | self.query_mut(ra_db::FileSourceRootQuery) |
99 | .query_mut(ra_db::FileSourceRootQuery) | ||
100 | .set(add_file.file_id, root_id); | 76 | .set(add_file.file_id, root_id); |
101 | source_root.files.insert(add_file.path, add_file.file_id); | 77 | source_root.files.insert(add_file.path, add_file.file_id); |
102 | } | 78 | } |
103 | for remove_file in root_change.removed { | 79 | for remove_file in root_change.removed { |
104 | self.db | 80 | self.query_mut(ra_db::FileTextQuery) |
105 | .query_mut(ra_db::FileTextQuery) | ||
106 | .set(remove_file.file_id, Default::default()); | 81 | .set(remove_file.file_id, Default::default()); |
107 | source_root.files.remove(&remove_file.path); | 82 | source_root.files.remove(&remove_file.path); |
108 | } | 83 | } |
109 | self.db | 84 | self.query_mut(ra_db::SourceRootQuery) |
110 | .query_mut(ra_db::SourceRootQuery) | ||
111 | .set(root_id, Arc::new(source_root)); | 85 | .set(root_id, Arc::new(source_root)); |
112 | } | 86 | } |
113 | 87 | ||
@@ -116,142 +90,86 @@ impl AnalysisHostImpl { | |||
116 | /// syntax trees. However, if we actually do that, everything is recomputed | 90 | /// syntax trees. However, if we actually do that, everything is recomputed |
117 | /// for some reason. Needs investigation. | 91 | /// for some reason. Needs investigation. |
118 | fn gc_syntax_trees(&mut self) { | 92 | fn gc_syntax_trees(&mut self) { |
119 | self.db | 93 | self.query(ra_db::SourceFileQuery) |
120 | .query(ra_db::SourceFileQuery) | ||
121 | .sweep(salsa::SweepStrategy::default().discard_values()); | 94 | .sweep(salsa::SweepStrategy::default().discard_values()); |
122 | self.db | 95 | self.query(hir::db::SourceFileItemsQuery) |
123 | .query(hir::db::SourceFileItemsQuery) | ||
124 | .sweep(salsa::SweepStrategy::default().discard_values()); | 96 | .sweep(salsa::SweepStrategy::default().discard_values()); |
125 | self.db | 97 | self.query(hir::db::FileItemQuery) |
126 | .query(hir::db::FileItemQuery) | ||
127 | .sweep(salsa::SweepStrategy::default().discard_values()); | 98 | .sweep(salsa::SweepStrategy::default().discard_values()); |
128 | } | 99 | } |
129 | } | 100 | } |
130 | 101 | ||
131 | pub(crate) struct AnalysisImpl { | 102 | impl db::RootDatabase { |
132 | pub(crate) db: salsa::Snapshot<db::RootDatabase>, | ||
133 | } | ||
134 | |||
135 | impl fmt::Debug for AnalysisImpl { | ||
136 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
137 | let db: &db::RootDatabase = &self.db; | ||
138 | fmt.debug_struct("AnalysisImpl").field("db", db).finish() | ||
139 | } | ||
140 | } | ||
141 | |||
142 | impl AnalysisImpl { | ||
143 | pub fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
144 | self.db.file_text(file_id) | ||
145 | } | ||
146 | pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode { | ||
147 | self.db.source_file(file_id) | ||
148 | } | ||
149 | pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { | ||
150 | self.db.file_lines(file_id) | ||
151 | } | ||
152 | pub fn world_symbols(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> { | ||
153 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | ||
154 | struct Snap(salsa::Snapshot<db::RootDatabase>); | ||
155 | impl Clone for Snap { | ||
156 | fn clone(&self) -> Snap { | ||
157 | Snap(self.0.snapshot()) | ||
158 | } | ||
159 | } | ||
160 | |||
161 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { | ||
162 | let snap = Snap(self.db.snapshot()); | ||
163 | self.db | ||
164 | .library_roots() | ||
165 | .par_iter() | ||
166 | .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) | ||
167 | .collect() | ||
168 | } else { | ||
169 | let mut files = Vec::new(); | ||
170 | for &root in self.db.local_roots().iter() { | ||
171 | let sr = self.db.source_root(root); | ||
172 | files.extend(sr.files.values().map(|&it| it)) | ||
173 | } | ||
174 | |||
175 | let snap = Snap(self.db.snapshot()); | ||
176 | files | ||
177 | .par_iter() | ||
178 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) | ||
179 | .filter_map(|it| it.ok()) | ||
180 | .collect() | ||
181 | }; | ||
182 | Ok(query.search(&buf)) | ||
183 | } | ||
184 | /// This returns `Vec` because a module may be included from several places. We | 103 | /// This returns `Vec` because a module may be included from several places. We |
185 | /// don't handle this case yet though, so the Vec has length at most one. | 104 | /// don't handle this case yet though, so the Vec has length at most one. |
186 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 105 | pub(crate) fn parent_module( |
187 | let descr = match source_binder::module_from_position(&*self.db, position)? { | 106 | &self, |
107 | position: FilePosition, | ||
108 | ) -> Cancelable<Vec<NavigationTarget>> { | ||
109 | let descr = match source_binder::module_from_position(self, position)? { | ||
188 | None => return Ok(Vec::new()), | 110 | None => return Ok(Vec::new()), |
189 | Some(it) => it, | 111 | Some(it) => it, |
190 | }; | 112 | }; |
191 | let (file_id, decl) = match descr.parent_link_source(&*self.db) { | 113 | let (file_id, decl) = match descr.parent_link_source(self) { |
192 | None => return Ok(Vec::new()), | 114 | None => return Ok(Vec::new()), |
193 | Some(it) => it, | 115 | Some(it) => it, |
194 | }; | 116 | }; |
195 | let decl = decl.borrowed(); | 117 | let decl = decl.borrowed(); |
196 | let decl_name = decl.name().unwrap(); | 118 | let decl_name = decl.name().unwrap(); |
197 | let sym = FileSymbol { | 119 | Ok(vec![NavigationTarget { |
120 | file_id, | ||
198 | name: decl_name.text(), | 121 | name: decl_name.text(), |
199 | node_range: decl_name.syntax().range(), | 122 | range: decl_name.syntax().range(), |
200 | kind: MODULE, | 123 | kind: MODULE, |
201 | }; | 124 | ptr: None, |
202 | Ok(vec![(file_id, sym)]) | 125 | }]) |
203 | } | 126 | } |
204 | /// Returns `Vec` for the same reason as `parent_module` | 127 | /// Returns `Vec` for the same reason as `parent_module` |
205 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | 128 | pub(crate) fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { |
206 | let descr = match source_binder::module_from_file_id(&*self.db, file_id)? { | 129 | let descr = match source_binder::module_from_file_id(self, file_id)? { |
207 | None => return Ok(Vec::new()), | 130 | None => return Ok(Vec::new()), |
208 | Some(it) => it, | 131 | Some(it) => it, |
209 | }; | 132 | }; |
210 | let root = descr.crate_root(); | 133 | let root = descr.crate_root(); |
211 | let file_id = root.source().file_id(); | 134 | let file_id = root.file_id(); |
212 | 135 | ||
213 | let crate_graph = self.db.crate_graph(); | 136 | let crate_graph = self.crate_graph(); |
214 | let crate_id = crate_graph.crate_id_for_crate_root(file_id); | 137 | let crate_id = crate_graph.crate_id_for_crate_root(file_id); |
215 | Ok(crate_id.into_iter().collect()) | 138 | Ok(crate_id.into_iter().collect()) |
216 | } | 139 | } |
217 | pub fn crate_root(&self, crate_id: CrateId) -> FileId { | 140 | pub(crate) fn crate_root(&self, crate_id: CrateId) -> FileId { |
218 | self.db.crate_graph().crate_root(crate_id) | 141 | self.crate_graph().crate_root(crate_id) |
219 | } | 142 | } |
220 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { | 143 | pub(crate) fn approximately_resolve_symbol( |
221 | let completions = completions(&self.db, position)?; | ||
222 | Ok(completions.map(|it| it.into())) | ||
223 | } | ||
224 | pub fn approximately_resolve_symbol( | ||
225 | &self, | 144 | &self, |
226 | position: FilePosition, | 145 | position: FilePosition, |
227 | ) -> Cancelable<Option<ReferenceResolution>> { | 146 | ) -> Cancelable<Option<ReferenceResolution>> { |
228 | let file = self.db.source_file(position.file_id); | 147 | let file = self.source_file(position.file_id); |
229 | let syntax = file.syntax(); | 148 | let syntax = file.syntax(); |
230 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { | 149 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { |
231 | let mut rr = ReferenceResolution::new(name_ref.syntax().range()); | 150 | let mut rr = ReferenceResolution::new(name_ref.syntax().range()); |
232 | if let Some(fn_descr) = source_binder::function_from_child_node( | 151 | if let Some(fn_descr) = |
233 | &*self.db, | 152 | source_binder::function_from_child_node(self, position.file_id, name_ref.syntax())? |
234 | position.file_id, | 153 | { |
235 | name_ref.syntax(), | 154 | let scope = fn_descr.scopes(self); |
236 | )? { | ||
237 | let scope = fn_descr.scopes(&*self.db); | ||
238 | // First try to resolve the symbol locally | 155 | // First try to resolve the symbol locally |
239 | if let Some(entry) = scope.resolve_local_name(name_ref) { | 156 | if let Some(entry) = scope.resolve_local_name(name_ref) { |
240 | rr.add_resolution( | 157 | rr.resolves_to.push(NavigationTarget { |
241 | position.file_id, | 158 | file_id: position.file_id, |
242 | FileSymbol { | 159 | name: entry.name().to_string().into(), |
243 | name: entry.name().to_string().into(), | 160 | range: entry.ptr().range(), |
244 | node_range: entry.ptr().range(), | 161 | kind: NAME, |
245 | kind: NAME, | 162 | ptr: None, |
246 | }, | 163 | }); |
247 | ); | ||
248 | return Ok(Some(rr)); | 164 | return Ok(Some(rr)); |
249 | }; | 165 | }; |
250 | } | 166 | } |
251 | // If that fails try the index based approach. | 167 | // If that fails try the index based approach. |
252 | for (file_id, symbol) in self.index_resolve(name_ref)? { | 168 | rr.resolves_to.extend( |
253 | rr.add_resolution(file_id, symbol); | 169 | self.index_resolve(name_ref)? |
254 | } | 170 | .into_iter() |
171 | .map(NavigationTarget::from_symbol), | ||
172 | ); | ||
255 | return Ok(Some(rr)); | 173 | return Ok(Some(rr)); |
256 | } | 174 | } |
257 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { | 175 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { |
@@ -259,19 +177,21 @@ impl AnalysisImpl { | |||
259 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { | 177 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { |
260 | if module.has_semi() { | 178 | if module.has_semi() { |
261 | if let Some(child_module) = | 179 | if let Some(child_module) = |
262 | source_binder::module_from_declaration(&*self.db, position.file_id, module)? | 180 | source_binder::module_from_declaration(self, position.file_id, module)? |
263 | { | 181 | { |
264 | let file_id = child_module.source().file_id(); | 182 | let file_id = child_module.file_id(); |
265 | let name = match child_module.name() { | 183 | let name = match child_module.name() { |
266 | Some(name) => name.to_string().into(), | 184 | Some(name) => name.to_string().into(), |
267 | None => "".into(), | 185 | None => "".into(), |
268 | }; | 186 | }; |
269 | let symbol = FileSymbol { | 187 | let symbol = NavigationTarget { |
188 | file_id, | ||
270 | name, | 189 | name, |
271 | node_range: TextRange::offset_len(0.into(), 0.into()), | 190 | range: TextRange::offset_len(0.into(), 0.into()), |
272 | kind: MODULE, | 191 | kind: MODULE, |
192 | ptr: None, | ||
273 | }; | 193 | }; |
274 | rr.add_resolution(file_id, symbol); | 194 | rr.resolves_to.push(symbol); |
275 | return Ok(Some(rr)); | 195 | return Ok(Some(rr)); |
276 | } | 196 | } |
277 | } | 197 | } |
@@ -280,10 +200,13 @@ impl AnalysisImpl { | |||
280 | Ok(None) | 200 | Ok(None) |
281 | } | 201 | } |
282 | 202 | ||
283 | pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> { | 203 | pub(crate) fn find_all_refs( |
284 | let file = self.db.source_file(position.file_id); | 204 | &self, |
205 | position: FilePosition, | ||
206 | ) -> Cancelable<Vec<(FileId, TextRange)>> { | ||
207 | let file = self.source_file(position.file_id); | ||
285 | // Find the binding associated with the offset | 208 | // Find the binding associated with the offset |
286 | let (binding, descr) = match find_binding(&self.db, &file, position)? { | 209 | let (binding, descr) = match find_binding(self, &file, position)? { |
287 | None => return Ok(Vec::new()), | 210 | None => return Ok(Vec::new()), |
288 | Some(it) => it, | 211 | Some(it) => it, |
289 | }; | 212 | }; |
@@ -295,7 +218,7 @@ impl AnalysisImpl { | |||
295 | .collect::<Vec<_>>(); | 218 | .collect::<Vec<_>>(); |
296 | ret.extend( | 219 | ret.extend( |
297 | descr | 220 | descr |
298 | .scopes(&*self.db) | 221 | .scopes(self) |
299 | .find_all_refs(binding) | 222 | .find_all_refs(binding) |
300 | .into_iter() | 223 | .into_iter() |
301 | .map(|ref_desc| (position.file_id, ref_desc.range)), | 224 | .map(|ref_desc| (position.file_id, ref_desc.range)), |
@@ -333,9 +256,8 @@ impl AnalysisImpl { | |||
333 | Ok(Some((binding, descr))) | 256 | Ok(Some((binding, descr))) |
334 | } | 257 | } |
335 | } | 258 | } |
336 | pub fn doc_text_for(&self, file_id: FileId, symbol: FileSymbol) -> Cancelable<Option<String>> { | 259 | pub(crate) fn doc_text_for(&self, nav: NavigationTarget) -> Cancelable<Option<String>> { |
337 | let file = self.db.source_file(file_id); | 260 | let result = match (nav.description(self), nav.docs(self)) { |
338 | let result = match (symbol.description(&file), symbol.docs(&file)) { | ||
339 | (Some(desc), Some(docs)) => { | 261 | (Some(desc), Some(docs)) => { |
340 | Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs) | 262 | Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs) |
341 | } | 263 | } |
@@ -347,8 +269,8 @@ impl AnalysisImpl { | |||
347 | Ok(result) | 269 | Ok(result) |
348 | } | 270 | } |
349 | 271 | ||
350 | pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { | 272 | pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { |
351 | let syntax = self.db.source_file(file_id); | 273 | let syntax = self.source_file(file_id); |
352 | 274 | ||
353 | let mut res = ra_editor::diagnostics(&syntax) | 275 | let mut res = ra_editor::diagnostics(&syntax) |
354 | .into_iter() | 276 | .into_iter() |
@@ -359,9 +281,9 @@ impl AnalysisImpl { | |||
359 | fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)), | 281 | fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)), |
360 | }) | 282 | }) |
361 | .collect::<Vec<_>>(); | 283 | .collect::<Vec<_>>(); |
362 | if let Some(m) = source_binder::module_from_file_id(&*self.db, file_id)? { | 284 | if let Some(m) = source_binder::module_from_file_id(self, file_id)? { |
363 | for (name_node, problem) in m.problems(&*self.db) { | 285 | for (name_node, problem) in m.problems(self) { |
364 | let source_root = self.db.file_source_root(file_id); | 286 | let source_root = self.file_source_root(file_id); |
365 | let diag = match problem { | 287 | let diag = match problem { |
366 | Problem::UnresolvedModule { candidate } => { | 288 | Problem::UnresolvedModule { candidate } => { |
367 | let create_file = FileSystemEdit::CreateFile { | 289 | let create_file = FileSystemEdit::CreateFile { |
@@ -411,29 +333,19 @@ impl AnalysisImpl { | |||
411 | Ok(res) | 333 | Ok(res) |
412 | } | 334 | } |
413 | 335 | ||
414 | pub fn assists(&self, frange: FileRange) -> Vec<SourceChange> { | 336 | pub(crate) fn assists(&self, frange: FileRange) -> Vec<SourceChange> { |
415 | let file = self.file_syntax(frange.file_id); | 337 | let file = self.source_file(frange.file_id); |
416 | let offset = frange.range.start(); | 338 | assists::assists(&file, frange.range) |
417 | let actions = vec![ | ||
418 | ra_editor::flip_comma(&file, offset).map(|f| f()), | ||
419 | ra_editor::add_derive(&file, offset).map(|f| f()), | ||
420 | ra_editor::add_impl(&file, offset).map(|f| f()), | ||
421 | ra_editor::make_pub_crate(&file, offset).map(|f| f()), | ||
422 | ra_editor::introduce_variable(&file, frange.range).map(|f| f()), | ||
423 | ]; | ||
424 | actions | ||
425 | .into_iter() | 339 | .into_iter() |
426 | .filter_map(|local_edit| { | 340 | .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) |
427 | Some(SourceChange::from_local_edit(frange.file_id, local_edit?)) | ||
428 | }) | ||
429 | .collect() | 341 | .collect() |
430 | } | 342 | } |
431 | 343 | ||
432 | pub fn resolve_callable( | 344 | pub(crate) fn resolve_callable( |
433 | &self, | 345 | &self, |
434 | position: FilePosition, | 346 | position: FilePosition, |
435 | ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> { | 347 | ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> { |
436 | let file = self.db.source_file(position.file_id); | 348 | let file = self.source_file(position.file_id); |
437 | let syntax = file.syntax(); | 349 | let syntax = file.syntax(); |
438 | 350 | ||
439 | // Find the calling expression and its NameRef | 351 |
@@ -442,53 +354,55 @@ impl AnalysisImpl { | |||
442 | 354 | ||
443 | // Resolve the function's NameRef (NOTE: this isn't entirely accurate). | 355 | // Resolve the function's NameRef (NOTE: this isn't entirely accurate). |
444 | let file_symbols = self.index_resolve(name_ref)?; | 356 | let file_symbols = self.index_resolve(name_ref)?; |
445 | for (fn_file_id, fs) in file_symbols { | 357 | for symbol in file_symbols { |
446 | if fs.kind == FN_DEF { | 358 | if symbol.ptr.kind() == FN_DEF { |
447 | let fn_file = self.db.source_file(fn_file_id); | 359 | let fn_file = self.source_file(symbol.file_id); |
448 | if let Some(fn_def) = find_node_at_offset(fn_file.syntax(), fs.node_range.start()) { | 360 | let fn_def = symbol.ptr.resolve(&fn_file); |
449 | let descr = ctry!(source_binder::function_from_source( | 361 | let fn_def = ast::FnDef::cast(fn_def.borrowed()).unwrap(); |
450 | &*self.db, fn_file_id, fn_def | 362 | let descr = ctry!(source_binder::function_from_source( |
451 | )?); | 363 | self, |
452 | if let Some(descriptor) = descr.signature_info(&*self.db) { | 364 | symbol.file_id, |
453 | // If we have a calling expression let's find which argument we are on | 365 | fn_def |
454 | let mut current_parameter = None; | 366 | )?); |
455 | 367 | if let Some(descriptor) = descr.signature_info(self) { | |
456 | let num_params = descriptor.params.len(); | 368 | // If we have a calling expression let's find which argument we are on |
457 | let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some(); | 369 | let mut current_parameter = None; |
458 | 370 | ||
459 | if num_params == 1 { | 371 | let num_params = descriptor.params.len(); |
460 | if !has_self { | 372 | let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some(); |
461 | current_parameter = Some(0); | 373 | |
462 | } | 374 | if num_params == 1 { |
463 | } else if num_params > 1 { | 375 | if !has_self { |
464 | // Count how many parameters into the call we are. | 376 | current_parameter = Some(0); |
465 | // TODO: This is best effort for now and should be fixed at some point. | ||
466 | // It may be better to see where we are in the arg_list and then check | ||
467 | // where offset is in that list (or beyond). | ||
468 | // Revisit this after we get documentation comments in. | ||
469 | if let Some(ref arg_list) = calling_node.arg_list() { | ||
470 | let start = arg_list.syntax().range().start(); | ||
471 | |||
472 | let range_search = TextRange::from_to(start, position.offset); | ||
473 | let mut commas: usize = arg_list | ||
474 | .syntax() | ||
475 | .text() | ||
476 | .slice(range_search) | ||
477 | .to_string() | ||
478 | .matches(',') | ||
479 | .count(); | ||
480 | |||
481 | // If we have a method call eat the first param since it's just self. | ||
482 | if has_self { | ||
483 | commas += 1; | ||
484 | } | ||
485 | |||
486 | current_parameter = Some(commas); | ||
487 | } | ||
488 | } | 377 | } |
378 | } else if num_params > 1 { | ||
379 | // Count how many parameters into the call we are. | ||
380 | // TODO: This is best effort for now and should be fixed at some point. | ||
381 | // It may be better to see where we are in the arg_list and then check | ||
382 | // where offset is in that list (or beyond). | ||
383 | // Revisit this after we get documentation comments in. | ||
384 | if let Some(ref arg_list) = calling_node.arg_list() { | ||
385 | let start = arg_list.syntax().range().start(); | ||
386 | |||
387 | let range_search = TextRange::from_to(start, position.offset); | ||
388 | let mut commas: usize = arg_list | ||
389 | .syntax() | ||
390 | .text() | ||
391 | .slice(range_search) | ||
392 | .to_string() | ||
393 | .matches(',') | ||
394 | .count(); | ||
395 | |||
396 | // If we have a method call eat the first param since it's just self. | ||
397 | if has_self { | ||
398 | commas += 1; | ||
399 | } | ||
489 | 400 | ||
490 | return Ok(Some((descriptor, current_parameter))); | 401 | current_parameter = Some(commas); |
402 | } | ||
491 | } | 403 | } |
404 | |||
405 | return Ok(Some((descriptor, current_parameter))); | ||
492 | } | 406 | } |
493 | } | 407 | } |
494 | } | 408 | } |
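The "current parameter" logic above is a best-effort heuristic: count the commas between the start of the argument list and the caret, and skip one extra slot for `self` in method calls. Restated as a self-contained sketch with hypothetical inputs:

```rust
// Standalone restatement of the comma-counting heuristic; the call text and
// caret offset below are made up for the example.
fn active_parameter(arg_list_text: &str, caret_in_arg_list: usize, has_self: bool) -> usize {
    // Count commas strictly before the caret...
    let mut commas = arg_list_text[..caret_in_arg_list].matches(',').count();
    // ...and eat the implicit `self` argument for method calls.
    if has_self {
        commas += 1;
    }
    commas
}

fn main() {
    //  caret at the end --------v
    let text = "(first, second, thi";
    assert_eq!(active_parameter(text, text.len(), false), 2); // third parameter
    assert_eq!(active_parameter(text, text.len(), true), 3);  // shifted past `self`
}
```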
@@ -496,20 +410,20 @@ impl AnalysisImpl { | |||
496 | Ok(None) | 410 | Ok(None) |
497 | } | 411 | } |
498 | 412 | ||
499 | pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> { | 413 | pub(crate) fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> { |
500 | let file = self.db.source_file(frange.file_id); | 414 | let file = self.source_file(frange.file_id); |
501 | let syntax = file.syntax(); | 415 | let syntax = file.syntax(); |
502 | let node = find_covering_node(syntax, frange.range); | 416 | let node = find_covering_node(syntax, frange.range); |
503 | let parent_fn = ctry!(node.ancestors().find_map(FnDef::cast)); | 417 | let parent_fn = ctry!(node.ancestors().find_map(FnDef::cast)); |
504 | let function = ctry!(source_binder::function_from_source( | 418 | let function = ctry!(source_binder::function_from_source( |
505 | &*self.db, | 419 | self, |
506 | frange.file_id, | 420 | frange.file_id, |
507 | parent_fn | 421 | parent_fn |
508 | )?); | 422 | )?); |
509 | let infer = function.infer(&*self.db)?; | 423 | let infer = function.infer(self)?; |
510 | Ok(infer.type_of_node(node).map(|t| t.to_string())) | 424 | Ok(infer.type_of_node(node).map(|t| t.to_string())) |
511 | } | 425 | } |
512 | pub fn rename( | 426 | pub(crate) fn rename( |
513 | &self, | 427 | &self, |
514 | position: FilePosition, | 428 | position: FilePosition, |
515 | new_name: &str, | 429 | new_name: &str, |
@@ -520,7 +434,7 @@ impl AnalysisImpl { | |||
520 | .map(|(file_id, text_range)| SourceFileEdit { | 434 | .map(|(file_id, text_range)| SourceFileEdit { |
521 | file_id: *file_id, | 435 | file_id: *file_id, |
522 | edit: { | 436 | edit: { |
523 | let mut builder = ra_text_edit::TextEditBuilder::new(); | 437 | let mut builder = ra_text_edit::TextEditBuilder::default(); |
524 | builder.replace(*text_range, new_name.into()); | 438 | builder.replace(*text_range, new_name.into()); |
525 | builder.finish() | 439 | builder.finish() |
526 | }, | 440 | }, |
@@ -528,12 +442,12 @@ impl AnalysisImpl { | |||
528 | .collect::<Vec<_>>(); | 442 | .collect::<Vec<_>>(); |
529 | Ok(res) | 443 | Ok(res) |
530 | } | 444 | } |
531 | fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 445 | fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<FileSymbol>> { |
532 | let name = name_ref.text(); | 446 | let name = name_ref.text(); |
533 | let mut query = Query::new(name.to_string()); | 447 | let mut query = Query::new(name.to_string()); |
534 | query.exact(); | 448 | query.exact(); |
535 | query.limit(4); | 449 | query.limit(4); |
536 | self.world_symbols(query) | 450 | crate::symbol_index::world_symbols(self, query) |
537 | } | 451 | } |
538 | } | 452 | } |
539 | 453 | ||
@@ -592,3 +506,91 @@ impl<'a> FnCallNode<'a> { | |||
592 | } | 506 | } |
593 | } | 507 | } |
594 | } | 508 | } |
509 | |||
510 | impl NavigationTarget { | ||
511 | fn node(&self, db: &db::RootDatabase) -> Option<SyntaxNode> { | ||
512 | let source_file = db.source_file(self.file_id); | ||
513 | let source_file = source_file.syntax(); | ||
514 | let node = source_file | ||
515 | .descendants() | ||
516 | .find(|node| node.kind() == self.kind && node.range() == self.range)? | ||
517 | .owned(); | ||
518 | Some(node) | ||
519 | } | ||
520 | |||
521 | fn docs(&self, db: &db::RootDatabase) -> Option<String> { | ||
522 | let node = self.node(db)?; | ||
523 | let node = node.borrowed(); | ||
524 | fn doc_comments<'a, N: ast::DocCommentsOwner<'a>>(node: N) -> Option<String> { | ||
525 | let comments = node.doc_comment_text(); | ||
526 | if comments.is_empty() { | ||
527 | None | ||
528 | } else { | ||
529 | Some(comments) | ||
530 | } | ||
531 | } | ||
532 | |||
533 | visitor() | ||
534 | .visit(doc_comments::<ast::FnDef>) | ||
535 | .visit(doc_comments::<ast::StructDef>) | ||
536 | .visit(doc_comments::<ast::EnumDef>) | ||
537 | .visit(doc_comments::<ast::TraitDef>) | ||
538 | .visit(doc_comments::<ast::Module>) | ||
539 | .visit(doc_comments::<ast::TypeDef>) | ||
540 | .visit(doc_comments::<ast::ConstDef>) | ||
541 | .visit(doc_comments::<ast::StaticDef>) | ||
542 | .accept(node)? | ||
543 | } | ||
544 | |||
545 | /// Get a description of this node. | ||
546 | /// | ||
547 | /// e.g. `struct Name`, `enum Name`, `fn Name` | ||
548 | fn description(&self, db: &db::RootDatabase) -> Option<String> { | ||
549 | // TODO: After type inference is done, add type information to improve the output | ||
550 | let node = self.node(db)?; | ||
551 | let node = node.borrowed(); | ||
552 | // TODO: Refactor to have less repetition | ||
553 | visitor() | ||
554 | .visit(|node: ast::FnDef| { | ||
555 | let mut string = "fn ".to_string(); | ||
556 | node.name()?.syntax().text().push_to(&mut string); | ||
557 | Some(string) | ||
558 | }) | ||
559 | .visit(|node: ast::StructDef| { | ||
560 | let mut string = "struct ".to_string(); | ||
561 | node.name()?.syntax().text().push_to(&mut string); | ||
562 | Some(string) | ||
563 | }) | ||
564 | .visit(|node: ast::EnumDef| { | ||
565 | let mut string = "enum ".to_string(); | ||
566 | node.name()?.syntax().text().push_to(&mut string); | ||
567 | Some(string) | ||
568 | }) | ||
569 | .visit(|node: ast::TraitDef| { | ||
570 | let mut string = "trait ".to_string(); | ||
571 | node.name()?.syntax().text().push_to(&mut string); | ||
572 | Some(string) | ||
573 | }) | ||
574 | .visit(|node: ast::Module| { | ||
575 | let mut string = "mod ".to_string(); | ||
576 | node.name()?.syntax().text().push_to(&mut string); | ||
577 | Some(string) | ||
578 | }) | ||
579 | .visit(|node: ast::TypeDef| { | ||
580 | let mut string = "type ".to_string(); | ||
581 | node.name()?.syntax().text().push_to(&mut string); | ||
582 | Some(string) | ||
583 | }) | ||
584 | .visit(|node: ast::ConstDef| { | ||
585 | let mut string = "const ".to_string(); | ||
586 | node.name()?.syntax().text().push_to(&mut string); | ||
587 | Some(string) | ||
588 | }) | ||
589 | .visit(|node: ast::StaticDef| { | ||
590 | let mut string = "static ".to_string(); | ||
591 | node.name()?.syntax().text().push_to(&mut string); | ||
592 | Some(string) | ||
593 | }) | ||
594 | .accept(node)? | ||
595 | } | ||
596 | } | ||
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index e56168510..ab935954c 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -1,6 +1,8 @@ | |||
1 | //! ra_analyzer crate is the brain of Rust analyzer. It relies on the `salsa` | 1 | //! ra_analyzer crate provides "ide-centric" APIs for the rust-analyzer. What |
2 | //! crate, which provides an incremental on-demand database of facts. | 2 | //! powers this API are the `RootDatabase` struct, which defines a `salsa` |
3 | 3 | //! database, and the `ra_hir` crate, where the majority of the analysis happens. |
4 | //! However, IDE-specific bits of the analysis (most notably completion) happen | ||
5 | //! in this crate. | ||
4 | macro_rules! ctry { | 6 | macro_rules! ctry { |
5 | ($expr:expr) => { | 7 | ($expr:expr) => { |
6 | match $expr { | 8 | match $expr { |
@@ -15,34 +17,34 @@ mod imp; | |||
15 | mod completion; | 17 | mod completion; |
16 | mod symbol_index; | 18 | mod symbol_index; |
17 | pub mod mock_analysis; | 19 | pub mod mock_analysis; |
20 | mod runnables; | ||
18 | 21 | ||
19 | mod extend_selection; | 22 | mod extend_selection; |
20 | mod syntax_highlighting; | 23 | mod syntax_highlighting; |
21 | mod macros; | ||
22 | 24 | ||
23 | use std::{fmt, sync::Arc}; | 25 | use std::{fmt, sync::Arc}; |
24 | 26 | ||
25 | use rustc_hash::FxHashMap; | 27 | use rustc_hash::FxHashMap; |
26 | use ra_syntax::{SourceFileNode, TextRange, TextUnit}; | 28 | use ra_syntax::{SourceFileNode, TextRange, TextUnit, SmolStr, SyntaxKind}; |
27 | use ra_text_edit::TextEdit; | 29 | use ra_text_edit::TextEdit; |
28 | use rayon::prelude::*; | 30 | use rayon::prelude::*; |
29 | use relative_path::RelativePathBuf; | 31 | use relative_path::RelativePathBuf; |
32 | use salsa::ParallelDatabase; | ||
30 | 33 | ||
31 | use crate::{ | 34 | use crate::symbol_index::{SymbolIndex, FileSymbol}; |
32 | imp::{AnalysisHostImpl, AnalysisImpl}, | ||
33 | symbol_index::SymbolIndex, | ||
34 | }; | ||
35 | 35 | ||
36 | pub use crate::completion::{CompletionItem, CompletionItemKind, InsertText}; | 36 | pub use crate::{ |
37 | completion::{CompletionItem, CompletionItemKind, InsertText}, | ||
38 | runnables::{Runnable, RunnableKind}, | ||
39 | }; | ||
37 | pub use ra_editor::{ | 40 | pub use ra_editor::{ |
38 | FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, RunnableKind, StructureNode, | 41 | Fold, FoldKind, HighlightedRange, LineIndex, StructureNode, Severity |
39 | Severity | ||
40 | }; | 42 | }; |
41 | pub use hir::FnSignatureInfo; | 43 | pub use hir::FnSignatureInfo; |
42 | 44 | ||
43 | pub use ra_db::{ | 45 | pub use ra_db::{ |
44 | Canceled, Cancelable, FilePosition, FileRange, | 46 | Canceled, Cancelable, FilePosition, FileRange, LocalSyntaxPtr, |
45 | CrateGraph, CrateId, SourceRootId, FileId | 47 | CrateGraph, CrateId, SourceRootId, FileId, SyntaxDatabase, FilesDatabase |
46 | }; | 48 | }; |
47 | 49 | ||
48 | #[derive(Default)] | 50 | #[derive(Default)] |
@@ -148,27 +150,6 @@ impl AnalysisChange { | |||
148 | } | 150 | } |
149 | } | 151 | } |
150 | 152 | ||
151 | /// `AnalysisHost` stores the current state of the world. | ||
152 | #[derive(Debug, Default)] | ||
153 | pub struct AnalysisHost { | ||
154 | imp: AnalysisHostImpl, | ||
155 | } | ||
156 | |||
157 | impl AnalysisHost { | ||
158 | /// Returns a snapshot of the current state, which you can query for | ||
159 | /// semantic information. | ||
160 | pub fn analysis(&self) -> Analysis { | ||
161 | Analysis { | ||
162 | imp: self.imp.analysis(), | ||
163 | } | ||
164 | } | ||
165 | /// Applies changes to the current state of the world. If there are | ||
166 | /// outstanding snapshots, they will be canceled. | ||
167 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
168 | self.imp.apply_change(change) | ||
169 | } | ||
170 | } | ||
171 | |||
172 | #[derive(Debug)] | 153 | #[derive(Debug)] |
173 | pub struct SourceChange { | 154 | pub struct SourceChange { |
174 | pub label: String, | 155 | pub label: String, |
@@ -240,6 +221,45 @@ impl Query { | |||
240 | } | 221 | } |
241 | } | 222 | } |
242 | 223 | ||
224 | /// `NavigationTarget` represents an element in the editor's UI which you can | ||
225 | /// click on to navigate to a particular piece of code. | ||
226 | /// | ||
227 | /// Typically, a `NavigationTarget` corresponds to some element in the source | ||
228 | /// code, like a function or a struct, but this is not strictly required. | ||
229 | #[derive(Debug)] | ||
230 | pub struct NavigationTarget { | ||
231 | file_id: FileId, | ||
232 | name: SmolStr, | ||
233 | kind: SyntaxKind, | ||
234 | range: TextRange, | ||
235 | // Should be DefId ideally | ||
236 | ptr: Option<LocalSyntaxPtr>, | ||
237 | } | ||
238 | |||
239 | impl NavigationTarget { | ||
240 | fn from_symbol(symbol: FileSymbol) -> NavigationTarget { | ||
241 | NavigationTarget { | ||
242 | file_id: symbol.file_id, | ||
243 | name: symbol.name.clone(), | ||
244 | kind: symbol.ptr.kind(), | ||
245 | range: symbol.ptr.range(), | ||
246 | ptr: Some(symbol.ptr.clone()), | ||
247 | } | ||
248 | } | ||
249 | pub fn name(&self) -> &SmolStr { | ||
250 | &self.name | ||
251 | } | ||
252 | pub fn kind(&self) -> SyntaxKind { | ||
253 | self.kind | ||
254 | } | ||
255 | pub fn file_id(&self) -> FileId { | ||
256 | self.file_id | ||
257 | } | ||
258 | pub fn range(&self) -> TextRange { | ||
259 | self.range | ||
260 | } | ||
261 | } | ||
262 | |||
243 | /// Result of "goto def" query. | 263 | /// Result of "goto def" query. |
244 | #[derive(Debug)] | 264 | #[derive(Debug)] |
245 | pub struct ReferenceResolution { | 265 | pub struct ReferenceResolution { |
@@ -248,7 +268,7 @@ pub struct ReferenceResolution { | |||
248 | /// client where the reference was. | 268 | /// client where the reference was. |
249 | pub reference_range: TextRange, | 269 | pub reference_range: TextRange, |
250 | /// What this reference resolves to. | 270 | /// What this reference resolves to. |
251 | pub resolves_to: Vec<(FileId, FileSymbol)>, | 271 | pub resolves_to: Vec<NavigationTarget>, |
252 | } | 272 | } |
253 | 273 | ||
254 | impl ReferenceResolution { | 274 | impl ReferenceResolution { |
@@ -258,9 +278,26 @@ impl ReferenceResolution { | |||
258 | resolves_to: Vec::new(), | 278 | resolves_to: Vec::new(), |
259 | } | 279 | } |
260 | } | 280 | } |
281 | } | ||
282 | |||
283 | /// `AnalysisHost` stores the current state of the world. | ||
284 | #[derive(Debug, Default)] | ||
285 | pub struct AnalysisHost { | ||
286 | db: db::RootDatabase, | ||
287 | } | ||
261 | 288 | ||
262 | fn add_resolution(&mut self, file_id: FileId, symbol: FileSymbol) { | 289 | impl AnalysisHost { |
263 | self.resolves_to.push((file_id, symbol)) | 290 | /// Returns a snapshot of the current state, which you can query for |
291 | /// semantic information. | ||
292 | pub fn analysis(&self) -> Analysis { | ||
293 | Analysis { | ||
294 | db: self.db.snapshot(), | ||
295 | } | ||
296 | } | ||
297 | /// Applies changes to the current state of the world. If there are | ||
298 | /// outstanding snapshots, they will be canceled. | ||
299 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
300 | self.db.apply_change(change) | ||
264 | } | 301 | } |
265 | } | 302 | } |
266 | 303 | ||
@@ -270,109 +307,146 @@ impl ReferenceResolution { | |||
270 | /// `Analysis` are canceled (most method return `Err(Canceled)`). | 307 | /// `Analysis` are canceled (most method return `Err(Canceled)`). |
271 | #[derive(Debug)] | 308 | #[derive(Debug)] |
272 | pub struct Analysis { | 309 | pub struct Analysis { |
273 | pub(crate) imp: AnalysisImpl, | 310 | db: salsa::Snapshot<db::RootDatabase>, |
274 | } | 311 | } |
275 | 312 | ||
276 | impl Analysis { | 313 | impl Analysis { |
314 | /// Gets the text of the source file. | ||
277 | pub fn file_text(&self, file_id: FileId) -> Arc<String> { | 315 | pub fn file_text(&self, file_id: FileId) -> Arc<String> { |
278 | self.imp.file_text(file_id) | 316 | self.db.file_text(file_id) |
279 | } | 317 | } |
318 | /// Gets the syntax tree of the file. | ||
280 | pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode { | 319 | pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode { |
281 | self.imp.file_syntax(file_id).clone() | 320 | self.db.source_file(file_id).clone() |
282 | } | 321 | } |
322 | /// Gets the file's `LineIndex`: data structure to convert between absolute | ||
323 | /// offsets and line/column representation. | ||
283 | pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { | 324 | pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { |
284 | self.imp.file_line_index(file_id) | 325 | self.db.file_lines(file_id) |
285 | } | 326 | } |
327 | /// Selects the next syntactic nodes encompassing the range. | ||
286 | pub fn extend_selection(&self, frange: FileRange) -> TextRange { | 328 | pub fn extend_selection(&self, frange: FileRange) -> TextRange { |
287 | extend_selection::extend_selection(&self.imp.db, frange) | 329 | extend_selection::extend_selection(&self.db, frange) |
288 | } | 330 | } |
331 | /// Returns the position of the matching brace (all types of braces are | ||
332 | /// supported). | ||
289 | pub fn matching_brace(&self, file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> { | 333 | pub fn matching_brace(&self, file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> { |
290 | ra_editor::matching_brace(file, offset) | 334 | ra_editor::matching_brace(file, offset) |
291 | } | 335 | } |
336 | /// Returns a syntax tree represented as `String`, for debug purposes. | ||
337 | // FIXME: use a better name here. | ||
292 | pub fn syntax_tree(&self, file_id: FileId) -> String { | 338 | pub fn syntax_tree(&self, file_id: FileId) -> String { |
293 | let file = self.imp.file_syntax(file_id); | 339 | let file = self.db.source_file(file_id); |
294 | ra_editor::syntax_tree(&file) | 340 | ra_editor::syntax_tree(&file) |
295 | } | 341 | } |
342 | /// Returns an edit to remove all newlines in the range, cleaning up minor | ||
343 | /// stuff like trailing commas. | ||
296 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { | 344 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { |
297 | let file = self.imp.file_syntax(frange.file_id); | 345 | let file = self.db.source_file(frange.file_id); |
298 | SourceChange::from_local_edit(frange.file_id, ra_editor::join_lines(&file, frange.range)) | 346 | SourceChange::from_local_edit(frange.file_id, ra_editor::join_lines(&file, frange.range)) |
299 | } | 347 | } |
348 | /// Returns an edit which should be applied when opening a new line, fixing | ||
349 | /// up minor stuff like continuing the comment. | ||
300 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { | 350 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { |
301 | let file = self.imp.file_syntax(position.file_id); | 351 | let file = self.db.source_file(position.file_id); |
302 | let edit = ra_editor::on_enter(&file, position.offset)?; | 352 | let edit = ra_editor::on_enter(&file, position.offset)?; |
303 | let res = SourceChange::from_local_edit(position.file_id, edit); | 353 | Some(SourceChange::from_local_edit(position.file_id, edit)) |
304 | Some(res) | ||
305 | } | 354 | } |
355 | /// Returns an edit which should be applied after `=` is typed. Primarily, | ||
356 | /// this works when adding `let =`. | ||
357 | // FIXME: use a snippet completion instead of this hack here. | ||
306 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { | 358 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { |
307 | let file = self.imp.file_syntax(position.file_id); | 359 | let file = self.db.source_file(position.file_id); |
308 | Some(SourceChange::from_local_edit( | 360 | let edit = ra_editor::on_eq_typed(&file, position.offset)?; |
309 | position.file_id, | 361 | Some(SourceChange::from_local_edit(position.file_id, edit)) |
310 | ra_editor::on_eq_typed(&file, position.offset)?, | ||
311 | )) | ||
312 | } | 362 | } |
363 | /// Returns a tree representation of symbols in the file. Useful for drawing a | ||
364 | /// file outline. | ||
313 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | 365 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { |
314 | let file = self.imp.file_syntax(file_id); | 366 | let file = self.db.source_file(file_id); |
315 | ra_editor::file_structure(&file) | 367 | ra_editor::file_structure(&file) |
316 | } | 368 | } |
369 | /// Returns the set of folding ranges. | ||
317 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { | 370 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { |
318 | let file = self.imp.file_syntax(file_id); | 371 | let file = self.db.source_file(file_id); |
319 | ra_editor::folding_ranges(&file) | 372 | ra_editor::folding_ranges(&file) |
320 | } | 373 | } |
321 | pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 374 | /// Fuzzy searches for a symbol. |
322 | self.imp.world_symbols(query) | 375 | pub fn symbol_search(&self, query: Query) -> Cancelable<Vec<NavigationTarget>> { |
376 | let res = symbol_index::world_symbols(&*self.db, query)? | ||
377 | .into_iter() | ||
378 | .map(NavigationTarget::from_symbol) | ||
379 | .collect(); | ||
380 | Ok(res) | ||
323 | } | 381 | } |
382 | /// Resolves a reference to its definition, but does not guarantee correctness. | ||
324 | pub fn approximately_resolve_symbol( | 383 | pub fn approximately_resolve_symbol( |
325 | &self, | 384 | &self, |
326 | position: FilePosition, | 385 | position: FilePosition, |
327 | ) -> Cancelable<Option<ReferenceResolution>> { | 386 | ) -> Cancelable<Option<ReferenceResolution>> { |
328 | self.imp.approximately_resolve_symbol(position) | 387 | self.db.approximately_resolve_symbol(position) |
329 | } | 388 | } |
389 | /// Finds all usages of the reference at point. | ||
330 | pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> { | 390 | pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> { |
331 | self.imp.find_all_refs(position) | 391 | self.db.find_all_refs(position) |
332 | } | 392 | } |
333 | pub fn doc_text_for(&self, file_id: FileId, symbol: FileSymbol) -> Cancelable<Option<String>> { | 393 | /// Returns the documentation string for a given target. |
334 | self.imp.doc_text_for(file_id, symbol) | 394 | pub fn doc_text_for(&self, nav: NavigationTarget) -> Cancelable<Option<String>> { |
395 | self.db.doc_text_for(nav) | ||
335 | } | 396 | } |
336 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 397 | /// Returns a `mod name;` declaration which created the current module. |
337 | self.imp.parent_module(position) | 398 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<NavigationTarget>> { |
399 | self.db.parent_module(position) | ||
338 | } | 400 | } |
401 | /// Returns the crates this file belongs to. | ||
339 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | 402 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { |
340 | self.imp.crate_for(file_id) | 403 | self.db.crate_for(file_id) |
341 | } | 404 | } |
405 | /// Returns the root file of the given crate. | ||
342 | pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> { | 406 | pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> { |
343 | Ok(self.imp.crate_root(crate_id)) | 407 | Ok(self.db.crate_root(crate_id)) |
344 | } | 408 | } |
409 | /// Returns the set of possible targets to run for the current file. | ||
345 | pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> { | 410 | pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> { |
346 | let file = self.imp.file_syntax(file_id); | 411 | runnables::runnables(&*self.db, file_id) |
347 | Ok(ra_editor::runnables(&file)) | ||
348 | } | 412 | } |
413 | /// Computes syntax highlighting for the given file. | ||
349 | pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | 414 | pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { |
350 | syntax_highlighting::highlight(&*self.imp.db, file_id) | 415 | syntax_highlighting::highlight(&*self.db, file_id) |
351 | } | 416 | } |
417 | /// Computes completions at the given position. | ||
352 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { | 418 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { |
353 | self.imp.completions(position) | 419 | let completions = completion::completions(&self.db, position)?; |
420 | Ok(completions.map(|it| it.into())) | ||
354 | } | 421 | } |
422 | /// Computes assists (aka code actions, aka intentions) for the given | ||
423 | /// range. | ||
355 | pub fn assists(&self, frange: FileRange) -> Cancelable<Vec<SourceChange>> { | 424 | pub fn assists(&self, frange: FileRange) -> Cancelable<Vec<SourceChange>> { |
356 | Ok(self.imp.assists(frange)) | 425 | Ok(self.db.assists(frange)) |
357 | } | 426 | } |
427 | /// Computes the set of diagnostics for the given file. | ||
358 | pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { | 428 | pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { |
359 | self.imp.diagnostics(file_id) | 429 | self.db.diagnostics(file_id) |
360 | } | 430 | } |
431 | /// Computes parameter information for the given call expression. | ||
361 | pub fn resolve_callable( | 432 | pub fn resolve_callable( |
362 | &self, | 433 | &self, |
363 | position: FilePosition, | 434 | position: FilePosition, |
364 | ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> { | 435 | ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> { |
365 | self.imp.resolve_callable(position) | 436 | self.db.resolve_callable(position) |
366 | } | 437 | } |
438 | /// Computes the type of the expression at the given position. | ||
367 | pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> { | 439 | pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> { |
368 | self.imp.type_of(frange) | 440 | self.db.type_of(frange) |
369 | } | 441 | } |
442 | /// Returns the edit required to rename the reference at the position to the new | ||
443 | /// name. | ||
370 | pub fn rename( | 444 | pub fn rename( |
371 | &self, | 445 | &self, |
372 | position: FilePosition, | 446 | position: FilePosition, |
373 | new_name: &str, | 447 | new_name: &str, |
374 | ) -> Cancelable<Vec<SourceFileEdit>> { | 448 | ) -> Cancelable<Vec<SourceFileEdit>> { |
375 | self.imp.rename(position, new_name) | 449 | self.db.rename(position, new_name) |
376 | } | 450 | } |
377 | } | 451 | } |
378 | 452 | ||
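The `AnalysisHost`/`Analysis` pair above is the snapshot pattern: the host owns the mutable salsa database, each `Analysis` is a cheap read-only snapshot of it, and applying a change cancels whatever queries are still running against outstanding snapshots. Below is a minimal conceptual sketch of that ownership split, with made-up types standing in for the salsa database; cancellation itself, which salsa provides, is not modeled here.

```rust
use std::sync::Arc;

// Hypothetical stand-ins for AnalysisHost/Analysis: the host owns the state,
// snapshots share it immutably, and mutation goes only through the host.
#[derive(Default)]
struct Host {
    state: Arc<String>, // stands in for db::RootDatabase
}

struct Snapshot {
    state: Arc<String>, // stands in for salsa::Snapshot<db::RootDatabase>
}

impl Host {
    fn analysis(&self) -> Snapshot {
        Snapshot {
            state: Arc::clone(&self.state),
        }
    }
    fn apply_change(&mut self, new_text: String) {
        // In the real code, salsa cancels in-flight queries on outstanding
        // snapshots at this point; this sketch only swaps the shared state.
        self.state = Arc::new(new_text);
    }
}

fn main() {
    let mut host = Host::default();
    host.apply_change("fn main() {}".to_string());
    let snap = host.analysis();
    println!("snapshot sees {} bytes", snap.state.len());
}
```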
diff --git a/crates/ra_analysis/src/macros.rs b/crates/ra_analysis/src/macros.rs deleted file mode 100644 index b9feb7fad..000000000 --- a/crates/ra_analysis/src/macros.rs +++ /dev/null | |||
@@ -1,75 +0,0 @@ | |||
1 | /// Beginning of macro expansion. | ||
2 | /// | ||
3 | /// This code should be moved out of ra_analysis into hir (?) ideally. | ||
4 | use ra_syntax::{ast, AstNode, SourceFileNode, TextRange}; | ||
5 | |||
6 | use crate::{db::RootDatabase, FileId}; | ||
7 | |||
8 | pub(crate) fn expand( | ||
9 | _db: &RootDatabase, | ||
10 | _file_id: FileId, | ||
11 | macro_call: ast::MacroCall, | ||
12 | ) -> Option<MacroExpansion> { | ||
13 | let path = macro_call.path()?; | ||
14 | if path.qualifier().is_some() { | ||
15 | return None; | ||
16 | } | ||
17 | let name_ref = path.segment()?.name_ref()?; | ||
18 | if name_ref.text() != "ctry" { | ||
19 | return None; | ||
20 | } | ||
21 | |||
22 | let arg = macro_call.token_tree()?; | ||
23 | let text = format!( | ||
24 | r" | ||
25 | fn dummy() {{ | ||
26 | match {} {{ | ||
27 | None => return Ok(None), | ||
28 | Some(it) => it, | ||
29 | }} | ||
30 | }}", | ||
31 | arg.syntax().text() | ||
32 | ); | ||
33 | let file = SourceFileNode::parse(&text); | ||
34 | let match_expr = file.syntax().descendants().find_map(ast::MatchExpr::cast)?; | ||
35 | let match_arg = match_expr.expr()?; | ||
36 | let ranges_map = vec![(arg.syntax().range(), match_arg.syntax().range())]; | ||
37 | let res = MacroExpansion { | ||
38 | source_file: file, | ||
39 | ranges_map, | ||
40 | }; | ||
41 | Some(res) | ||
42 | } | ||
43 | |||
44 | pub(crate) struct MacroExpansion { | ||
45 | pub(crate) source_file: SourceFileNode, | ||
46 | pub(crate) ranges_map: Vec<(TextRange, TextRange)>, | ||
47 | } | ||
48 | |||
49 | impl MacroExpansion { | ||
50 | pub(crate) fn source_file(&self) -> &SourceFileNode { | ||
51 | &self.source_file | ||
52 | } | ||
53 | pub(crate) fn map_range_back(&self, tgt_range: TextRange) -> Option<TextRange> { | ||
54 | for (s_range, t_range) in self.ranges_map.iter() { | ||
55 | if tgt_range.is_subrange(&t_range) { | ||
56 | let tgt_at_zero_range = tgt_range - tgt_range.start(); | ||
57 | let tgt_range_offset = tgt_range.start() - t_range.start(); | ||
58 | let src_range = tgt_at_zero_range + tgt_range_offset + s_range.start(); | ||
59 | return Some(src_range); | ||
60 | } | ||
61 | } | ||
62 | None | ||
63 | } | ||
64 | pub(crate) fn map_range_forward(&self, src_range: TextRange) -> Option<TextRange> { | ||
65 | for (s_range, t_range) in self.ranges_map.iter() { | ||
66 | if src_range.is_subrange(&s_range) { | ||
67 | let src_at_zero_range = src_range - src_range.start(); | ||
68 | let src_range_offset = src_range.start() - s_range.start(); | ||
69 | let src_range = src_at_zero_range + src_range_offset + t_range.start(); | ||
70 | return Some(src_range); | ||
71 | } | ||
72 | } | ||
73 | None | ||
74 | } | ||
75 | } | ||
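The deleted `map_range_back`/`map_range_forward` helpers are pure offset arithmetic: a range that falls inside one side of a `(source, expansion)` pair is shifted by the difference between the two anchors. Here is a standalone sketch of the backwards direction with plain integers instead of `TextRange`; the concrete offsets are invented for illustration.

```rust
// Simplified stand-in for ra_syntax::TextRange: half-open [start, end).
#[derive(Debug, Clone, Copy, PartialEq)]
struct Range {
    start: u32,
    end: u32,
}

impl Range {
    fn contains(self, other: Range) -> bool {
        self.start <= other.start && other.end <= self.end
    }
}

// Maps a range in expansion coordinates back to source coordinates,
// mirroring the arithmetic in `MacroExpansion::map_range_back`.
fn map_range_back(map: &[(Range, Range)], tgt: Range) -> Option<Range> {
    for &(src, dst) in map {
        if dst.contains(tgt) {
            let offset = tgt.start - dst.start; // position inside the expansion
            let len = tgt.end - tgt.start;
            let start = src.start + offset; // same position inside the source
            return Some(Range { start, end: start + len });
        }
    }
    None
}

fn main() {
    // Say the macro argument sits at [10, 28) in the source and was copied
    // to [30, 48) in the synthesized expansion (numbers are made up).
    let map = [(Range { start: 10, end: 28 }, Range { start: 30, end: 48 })];
    let in_expansion = Range { start: 35, end: 37 };
    assert_eq!(
        map_range_back(&map, in_expansion),
        Some(Range { start: 15, end: 17 })
    );
}
```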
diff --git a/crates/ra_analysis/src/runnables.rs b/crates/ra_analysis/src/runnables.rs new file mode 100644 index 000000000..474267605 --- /dev/null +++ b/crates/ra_analysis/src/runnables.rs | |||
@@ -0,0 +1,86 @@ | |||
1 | use itertools::Itertools; | ||
2 | use ra_syntax::{ | ||
3 | ast::{self, AstNode, NameOwner, ModuleItemOwner}, | ||
4 | TextRange, SyntaxNodeRef, | ||
5 | }; | ||
6 | use ra_db::{Cancelable, SyntaxDatabase}; | ||
7 | |||
8 | use crate::{db::RootDatabase, FileId}; | ||
9 | |||
10 | #[derive(Debug)] | ||
11 | pub struct Runnable { | ||
12 | pub range: TextRange, | ||
13 | pub kind: RunnableKind, | ||
14 | } | ||
15 | |||
16 | #[derive(Debug)] | ||
17 | pub enum RunnableKind { | ||
18 | Test { name: String }, | ||
19 | TestMod { path: String }, | ||
20 | Bin, | ||
21 | } | ||
22 | |||
23 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<Runnable>> { | ||
24 | let source_file = db.source_file(file_id); | ||
25 | let res = source_file | ||
26 | .syntax() | ||
27 | .descendants() | ||
28 | .filter_map(|i| runnable(db, file_id, i)) | ||
29 | .collect(); | ||
30 | Ok(res) | ||
31 | } | ||
32 | |||
33 | fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNodeRef) -> Option<Runnable> { | ||
34 | if let Some(fn_def) = ast::FnDef::cast(item) { | ||
35 | runnable_fn(fn_def) | ||
36 | } else if let Some(m) = ast::Module::cast(item) { | ||
37 | runnable_mod(db, file_id, m) | ||
38 | } else { | ||
39 | None | ||
40 | } | ||
41 | } | ||
42 | |||
43 | fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { | ||
44 | let name = fn_def.name()?.text(); | ||
45 | let kind = if name == "main" { | ||
46 | RunnableKind::Bin | ||
47 | } else if fn_def.has_atom_attr("test") { | ||
48 | RunnableKind::Test { | ||
49 | name: name.to_string(), | ||
50 | } | ||
51 | } else { | ||
52 | return None; | ||
53 | }; | ||
54 | Some(Runnable { | ||
55 | range: fn_def.syntax().range(), | ||
56 | kind, | ||
57 | }) | ||
58 | } | ||
59 | |||
60 | fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { | ||
61 | let has_test_function = module | ||
62 | .item_list()? | ||
63 | .items() | ||
64 | .filter_map(|it| match it { | ||
65 | ast::ModuleItem::FnDef(it) => Some(it), | ||
66 | _ => None, | ||
67 | }) | ||
68 | .any(|f| f.has_atom_attr("test")); | ||
69 | if !has_test_function { | ||
70 | return None; | ||
71 | } | ||
72 | let range = module.syntax().range(); | ||
73 | let module = | ||
74 | hir::source_binder::module_from_child_node(db, file_id, module.syntax()).ok()??; | ||
75 | let path = module | ||
76 | .path_to_root() | ||
77 | .into_iter() | ||
78 | .rev() | ||
79 | .into_iter() | ||
80 | .filter_map(|it| it.name().map(Clone::clone)) | ||
81 | .join("::"); | ||
82 | Some(Runnable { | ||
83 | range, | ||
84 | kind: RunnableKind::TestMod { path }, | ||
85 | }) | ||
86 | } | ||
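`runnables.rs` boils down to one classification rule: a function named `main` is a binary target, a `#[test]` function is a test, and a module containing at least one test becomes a `TestMod` whose path joins the module names from the crate root down with `::`. A plain-data sketch of that rule, without the ra_syntax/hir plumbing (the types and inputs here are illustrative, not the crate's API):

```rust
#[derive(Debug)]
enum RunnableKind {
    Bin,
    Test { name: String },
    TestMod { path: String },
}

// Mirrors `runnable_fn`: only `main` and `#[test]` functions are runnable.
fn classify_fn(name: &str, has_test_attr: bool) -> Option<RunnableKind> {
    if name == "main" {
        Some(RunnableKind::Bin)
    } else if has_test_attr {
        Some(RunnableKind::Test {
            name: name.to_string(),
        })
    } else {
        None
    }
}

// Mirrors the tail of `runnable_mod`: `path_to_root` goes from the module
// up towards the (unnamed) crate root, so it is reversed before joining.
fn test_mod_path(path_to_root: &[&str]) -> RunnableKind {
    let path = path_to_root
        .iter()
        .rev()
        .cloned()
        .collect::<Vec<_>>()
        .join("::");
    RunnableKind::TestMod { path }
}

fn main() {
    println!("{:?}", classify_fn("main", false)); // Some(Bin)
    println!("{:?}", classify_fn("parses_empty_file", true)); // Some(Test { .. })
    println!("{:?}", test_mod_path(&["tests", "parser"])); // TestMod { path: "parser::tests" }
}
```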
diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs index e5bdf0aa1..e2b1c88fe 100644 --- a/crates/ra_analysis/src/symbol_index.rs +++ b/crates/ra_analysis/src/symbol_index.rs | |||
@@ -1,24 +1,48 @@ | |||
1 | //! This module handles fuzzy-searching of functions, structs and other symbols | ||
2 | //! by name across the whole workspace and dependencies. | ||
3 | //! | ||
4 | //! It works by building an incrementally-updated text-search index of all | ||
5 | //! symbols. The backbone of the index is the **awesome** `fst` crate by | ||
6 | //! @BurntSushi. | ||
7 | //! | ||
8 | //! In a nutshell, you give a set of strings to the `fst`, and it builds a | ||
9 | //! finite state machine describing this set of strings. The strings which | ||
10 | //! could fuzzy-match a pattern can also be described by a finite state machine. | ||
11 | //! What is freakingly cool is that you can now traverse both state machines in | ||
12 | //! lock-step to enumerate the strings which are both in the input set and | ||
13 | //! fuzzy-match the query. Or, more formally, given two languages described by | ||
14 | //! fsts, one can build a product fst which describes the intersection of the | ||
15 | //! languages. | ||
16 | //! | ||
17 | //! `fst` does not support cheap updating of the index, but it supports unioning | ||
18 | //! of state machines. So, to account for changing source code, we build an fst | ||
19 | //! for each library (which is assumed to never change) and an fst for each rust | ||
20 | //! file in the current workspace, and run a query against the union of all | ||
21 | //! those fsts. | ||
1 | use std::{ | 22 | use std::{ |
23 | cmp::Ordering, | ||
2 | hash::{Hash, Hasher}, | 24 | hash::{Hash, Hasher}, |
3 | sync::Arc, | 25 | sync::Arc, |
4 | }; | 26 | }; |
5 | 27 | ||
6 | use fst::{self, Streamer}; | 28 | use fst::{self, Streamer}; |
7 | use ra_editor::{self, FileSymbol}; | ||
8 | use ra_syntax::{ | 29 | use ra_syntax::{ |
9 | SourceFileNode, | 30 | SyntaxNodeRef, SourceFileNode, SmolStr, |
31 | algo::{visit::{visitor, Visitor}, find_covering_node}, | ||
10 | SyntaxKind::{self, *}, | 32 | SyntaxKind::{self, *}, |
33 | ast::{self, NameOwner}, | ||
11 | }; | 34 | }; |
12 | use ra_db::{SyntaxDatabase, SourceRootId}; | 35 | use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr}; |
36 | use salsa::ParallelDatabase; | ||
13 | use rayon::prelude::*; | 37 | use rayon::prelude::*; |
14 | 38 | ||
15 | use crate::{ | 39 | use crate::{ |
16 | Cancelable, | 40 | Cancelable, FileId, Query, |
17 | FileId, Query, | 41 | db::RootDatabase, |
18 | }; | 42 | }; |
19 | 43 | ||
20 | salsa::query_group! { | 44 | salsa::query_group! { |
21 | pub(crate) trait SymbolsDatabase: SyntaxDatabase { | 45 | pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { |
22 | fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { | 46 | fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { |
23 | type FileSymbolsQuery; | 47 | type FileSymbolsQuery; |
24 | } | 48 | } |
@@ -29,15 +53,60 @@ salsa::query_group! { | |||
29 | } | 53 | } |
30 | } | 54 | } |
31 | 55 | ||
32 | fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { | 56 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> { |
33 | db.check_canceled()?; | 57 | db.check_canceled()?; |
34 | let syntax = db.source_file(file_id); | 58 | let source_file = db.source_file(file_id); |
35 | Ok(Arc::new(SymbolIndex::for_file(file_id, syntax))) | 59 | let mut symbols = source_file |
60 | .syntax() | ||
61 | .descendants() | ||
62 | .filter_map(to_symbol) | ||
63 | .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) | ||
64 | .collect::<Vec<_>>(); | ||
65 | |||
66 | for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? { | ||
67 | let node = find_covering_node(source_file.syntax(), text_range); | ||
68 | let ptr = LocalSyntaxPtr::new(node); | ||
69 | symbols.push(FileSymbol { file_id, name, ptr }) | ||
70 | } | ||
71 | |||
72 | Ok(Arc::new(SymbolIndex::new(symbols))) | ||
73 | } | ||
74 | |||
75 | pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable<Vec<FileSymbol>> { | ||
76 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | ||
77 | struct Snap(salsa::Snapshot<RootDatabase>); | ||
78 | impl Clone for Snap { | ||
79 | fn clone(&self) -> Snap { | ||
80 | Snap(self.0.snapshot()) | ||
81 | } | ||
82 | } | ||
83 | |||
84 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { | ||
85 | let snap = Snap(db.snapshot()); | ||
86 | db.library_roots() | ||
87 | .par_iter() | ||
88 | .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) | ||
89 | .collect() | ||
90 | } else { | ||
91 | let mut files = Vec::new(); | ||
92 | for &root in db.local_roots().iter() { | ||
93 | let sr = db.source_root(root); | ||
94 | files.extend(sr.files.values().map(|&it| it)) | ||
95 | } | ||
96 | |||
97 | let snap = Snap(db.snapshot()); | ||
98 | files | ||
99 | .par_iter() | ||
100 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) | ||
101 | .filter_map(|it| it.ok()) | ||
102 | .collect() | ||
103 | }; | ||
104 | Ok(query.search(&buf)) | ||
36 | } | 105 | } |
37 | 106 | ||
38 | #[derive(Default, Debug)] | 107 | #[derive(Default, Debug)] |
39 | pub(crate) struct SymbolIndex { | 108 | pub(crate) struct SymbolIndex { |
40 | symbols: Vec<(FileId, FileSymbol)>, | 109 | symbols: Vec<FileSymbol>, |
41 | map: fst::Map, | 110 | map: fst::Map, |
42 | } | 111 | } |
43 | 112 | ||
@@ -56,6 +125,17 @@ impl Hash for SymbolIndex { | |||
56 | } | 125 | } |
57 | 126 | ||
58 | impl SymbolIndex { | 127 | impl SymbolIndex { |
128 | fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex { | ||
129 | fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering { | ||
130 | unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str())) | ||
131 | } | ||
132 | symbols.par_sort_by(cmp); | ||
133 | symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal); | ||
134 | let names = symbols.iter().map(|it| it.name.as_str().to_lowercase()); | ||
135 | let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap(); | ||
136 | SymbolIndex { symbols, map } | ||
137 | } | ||
138 | |||
59 | pub(crate) fn len(&self) -> usize { | 139 | pub(crate) fn len(&self) -> usize { |
60 | self.symbols.len() | 140 | self.symbols.len() |
61 | } | 141 | } |
@@ -63,29 +143,21 @@ impl SymbolIndex { | |||
63 | pub(crate) fn for_files( | 143 | pub(crate) fn for_files( |
64 | files: impl ParallelIterator<Item = (FileId, SourceFileNode)>, | 144 | files: impl ParallelIterator<Item = (FileId, SourceFileNode)>, |
65 | ) -> SymbolIndex { | 145 | ) -> SymbolIndex { |
66 | let mut symbols = files | 146 | let symbols = files |
67 | .flat_map(|(file_id, file)| { | 147 | .flat_map(|(file_id, file)| { |
68 | ra_editor::file_symbols(&file) | 148 | file.syntax() |
69 | .into_iter() | 149 | .descendants() |
70 | .map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol))) | 150 | .filter_map(to_symbol) |
151 | .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) | ||
71 | .collect::<Vec<_>>() | 152 | .collect::<Vec<_>>() |
72 | }) | 153 | }) |
73 | .collect::<Vec<_>>(); | 154 | .collect::<Vec<_>>(); |
74 | symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0)); | 155 | SymbolIndex::new(symbols) |
75 | symbols.dedup_by(|s1, s2| s1.0 == s2.0); | ||
76 | let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) = | ||
77 | symbols.into_iter().unzip(); | ||
78 | let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap(); | ||
79 | SymbolIndex { symbols, map } | ||
80 | } | ||
81 | |||
82 | pub(crate) fn for_file(file_id: FileId, file: SourceFileNode) -> SymbolIndex { | ||
83 | SymbolIndex::for_files(rayon::iter::once((file_id, file))) | ||
84 | } | 156 | } |
85 | } | 157 | } |
86 | 158 | ||
87 | impl Query { | 159 | impl Query { |
88 | pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<(FileId, FileSymbol)> { | 160 | pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> { |
89 | let mut op = fst::map::OpBuilder::new(); | 161 | let mut op = fst::map::OpBuilder::new(); |
90 | for file_symbols in indices.iter() { | 162 | for file_symbols in indices.iter() { |
91 | let automaton = fst::automaton::Subsequence::new(&self.lowercased); | 163 | let automaton = fst::automaton::Subsequence::new(&self.lowercased); |
@@ -101,14 +173,14 @@ impl Query { | |||
101 | let file_symbols = &indices[indexed_value.index]; | 173 | let file_symbols = &indices[indexed_value.index]; |
102 | let idx = indexed_value.value as usize; | 174 | let idx = indexed_value.value as usize; |
103 | 175 | ||
104 | let (file_id, symbol) = &file_symbols.symbols[idx]; | 176 | let symbol = &file_symbols.symbols[idx]; |
105 | if self.only_types && !is_type(symbol.kind) { | 177 | if self.only_types && !is_type(symbol.ptr.kind()) { |
106 | continue; | 178 | continue; |
107 | } | 179 | } |
108 | if self.exact && symbol.name != self.query { | 180 | if self.exact && symbol.name != self.query { |
109 | continue; | 181 | continue; |
110 | } | 182 | } |
111 | res.push((*file_id, symbol.clone())); | 183 | res.push(symbol.clone()); |
112 | } | 184 | } |
113 | } | 185 | } |
114 | res | 186 | res |
@@ -121,3 +193,30 @@ fn is_type(kind: SyntaxKind) -> bool { | |||
121 | _ => false, | 193 | _ => false, |
122 | } | 194 | } |
123 | } | 195 | } |
196 | |||
197 | /// The actual data that is stored in the index. It should be as compact as | ||
198 | /// possible. | ||
199 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
200 | pub(crate) struct FileSymbol { | ||
201 | pub(crate) file_id: FileId, | ||
202 | pub(crate) name: SmolStr, | ||
203 | pub(crate) ptr: LocalSyntaxPtr, | ||
204 | } | ||
205 | |||
206 | fn to_symbol(node: SyntaxNodeRef) -> Option<(SmolStr, LocalSyntaxPtr)> { | ||
207 | fn decl<'a, N: NameOwner<'a>>(node: N) -> Option<(SmolStr, LocalSyntaxPtr)> { | ||
208 | let name = node.name()?.text(); | ||
209 | let ptr = LocalSyntaxPtr::new(node.syntax()); | ||
210 | Some((name, ptr)) | ||
211 | } | ||
212 | visitor() | ||
213 | .visit(decl::<ast::FnDef>) | ||
214 | .visit(decl::<ast::StructDef>) | ||
215 | .visit(decl::<ast::EnumDef>) | ||
216 | .visit(decl::<ast::TraitDef>) | ||
217 | .visit(decl::<ast::Module>) | ||
218 | .visit(decl::<ast::TypeDef>) | ||
219 | .visit(decl::<ast::ConstDef>) | ||
220 | .visit(decl::<ast::StaticDef>) | ||
221 | .accept(node)? | ||
222 | } | ||
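The module comment above is the whole design: lowercase the symbol names, store them in an `fst::Map` keyed by name with the symbol's index as the value, and intersect that map with a subsequence automaton built from the query. A minimal sketch of that search using the `fst` crate directly (the symbol names are made up, and the real code additionally unions one map per file or library via `fst::map::OpBuilder` instead of searching a single map):

```rust
use fst::{automaton::Subsequence, IntoStreamer, Map, Streamer};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Keys must be unique, lowercased and lexicographically sorted, which is
    // what SymbolIndex::new guarantees; the value is the symbol's index.
    let names = vec![("analysis", 0u64), ("analysishost", 1), ("symbolindex", 2)];
    let map = Map::from_iter(names)?;

    // A subsequence automaton is the fuzzy matcher: "ayh" matches
    // "analysishost" because its letters occur there in order.
    let mut stream = map.search(Subsequence::new("ayh")).into_stream();
    while let Some((name, idx)) = stream.next() {
        println!("{} -> symbol #{}", String::from_utf8_lossy(name), idx);
    }
    Ok(())
}
```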
diff --git a/crates/ra_analysis/src/syntax_highlighting.rs b/crates/ra_analysis/src/syntax_highlighting.rs index 38219da71..35e153ca0 100644 --- a/crates/ra_analysis/src/syntax_highlighting.rs +++ b/crates/ra_analysis/src/syntax_highlighting.rs | |||
@@ -9,19 +9,19 @@ use crate::{ | |||
9 | 9 | ||
10 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | 10 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { |
11 | let source_file = db.source_file(file_id); | 11 | let source_file = db.source_file(file_id); |
12 | let mut res = ra_editor::highlight(&source_file); | 12 | let mut res = ra_editor::highlight(source_file.syntax()); |
13 | for macro_call in source_file | 13 | for macro_call in source_file |
14 | .syntax() | 14 | .syntax() |
15 | .descendants() | 15 | .descendants() |
16 | .filter_map(ast::MacroCall::cast) | 16 | .filter_map(ast::MacroCall::cast) |
17 | { | 17 | { |
18 | if let Some(exp) = crate::macros::expand(db, file_id, macro_call) { | 18 | if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { |
19 | let mapped_ranges = ra_editor::highlight(exp.source_file()) | 19 | let mapped_ranges = ra_editor::highlight(exp.syntax().borrowed()) |
20 | .into_iter() | 20 | .into_iter() |
21 | .filter_map(|r| { | 21 | .filter_map(|r| { |
22 | let mapped_range = exp.map_range_back(r.range)?; | 22 | let mapped_range = exp.map_range_back(r.range)?; |
23 | let res = HighlightedRange { | 23 | let res = HighlightedRange { |
24 | range: mapped_range, | 24 | range: mapped_range + off, |
25 | tag: r.tag, | 25 | tag: r.tag, |
26 | }; | 26 | }; |
27 | Some(res) | 27 | Some(res) |
@@ -43,8 +43,9 @@ mod tests { | |||
43 | " | 43 | " |
44 | fn main() { | 44 | fn main() { |
45 | ctry!({ let x = 92; x}); | 45 | ctry!({ let x = 92; x}); |
46 | vec![{ let x = 92; x}]; | ||
46 | } | 47 | } |
47 | ", | 48 | ", |
48 | ); | 49 | ); |
49 | let highlights = analysis.highlight(file_id).unwrap(); | 50 | let highlights = analysis.highlight(file_id).unwrap(); |
50 | assert_eq_dbg( | 51 | assert_eq_dbg( |
@@ -53,10 +54,39 @@ mod tests { | |||
53 | HighlightedRange { range: [41; 46), tag: "macro" }, | 54 | HighlightedRange { range: [41; 46), tag: "macro" }, |
54 | HighlightedRange { range: [49; 52), tag: "keyword" }, | 55 | HighlightedRange { range: [49; 52), tag: "keyword" }, |
55 | HighlightedRange { range: [57; 59), tag: "literal" }, | 56 | HighlightedRange { range: [57; 59), tag: "literal" }, |
57 | HighlightedRange { range: [82; 86), tag: "macro" }, | ||
58 | HighlightedRange { range: [89; 92), tag: "keyword" }, | ||
59 | HighlightedRange { range: [97; 99), tag: "literal" }, | ||
56 | HighlightedRange { range: [49; 52), tag: "keyword" }, | 60 | HighlightedRange { range: [49; 52), tag: "keyword" }, |
57 | HighlightedRange { range: [53; 54), tag: "function" }, | 61 | HighlightedRange { range: [53; 54), tag: "function" }, |
58 | HighlightedRange { range: [57; 59), tag: "literal" }, | 62 | HighlightedRange { range: [57; 59), tag: "literal" }, |
59 | HighlightedRange { range: [61; 62), tag: "text" }]"#, | 63 | HighlightedRange { range: [61; 62), tag: "text" }, |
64 | HighlightedRange { range: [89; 92), tag: "keyword" }, | ||
65 | HighlightedRange { range: [93; 94), tag: "function" }, | ||
66 | HighlightedRange { range: [97; 99), tag: "literal" }, | ||
67 | HighlightedRange { range: [101; 102), tag: "text" }]"#, | ||
68 | &highlights, | ||
69 | ) | ||
70 | } | ||
71 | |||
72 | // FIXME: this test is not really necessary: an artifact of the initial hacky | ||
73 | // macros implementation. | ||
74 | #[test] | ||
75 | fn highlight_query_group_macro() { | ||
76 | let (analysis, file_id) = single_file( | ||
77 | " | ||
78 | salsa::query_group! { | ||
79 | pub trait HirDatabase: SyntaxDatabase {} | ||
80 | } | ||
81 | ", | ||
82 | ); | ||
83 | let highlights = analysis.highlight(file_id).unwrap(); | ||
84 | assert_eq_dbg( | ||
85 | r#"[HighlightedRange { range: [20; 32), tag: "macro" }, | ||
86 | HighlightedRange { range: [13; 18), tag: "text" }, | ||
87 | HighlightedRange { range: [51; 54), tag: "keyword" }, | ||
88 | HighlightedRange { range: [55; 60), tag: "keyword" }, | ||
89 | HighlightedRange { range: [61; 72), tag: "function" }]"#, | ||
60 | &highlights, | 90 | &highlights, |
61 | ) | 91 | ) |
62 | } | 92 | } |