23 files changed, 503 insertions, 284 deletions
diff --git a/crates/ra_analysis/src/completion.rs b/crates/ra_analysis/src/completion.rs
index 6667c06e7..7c3476e5c 100644
--- a/crates/ra_analysis/src/completion.rs
+++ b/crates/ra_analysis/src/completion.rs
@@ -11,10 +11,10 @@ use rustc_hash::{FxHashMap, FxHashSet}; | |||
11 | use crate::{ | 11 | use crate::{ |
12 | db::{self, SyntaxDatabase}, | 12 | db::{self, SyntaxDatabase}, |
13 | descriptors::function::FnScopes, | 13 | descriptors::function::FnScopes, |
14 | descriptors::module::{ModuleId, ModuleScope, ModuleTree}, | 14 | descriptors::module::{ModuleId, ModuleScope, ModuleTree, ModuleSource}, |
15 | descriptors::DescriptorDatabase, | 15 | descriptors::DescriptorDatabase, |
16 | input::FilesDatabase, | 16 | input::FilesDatabase, |
17 | Cancelable, FileId, | 17 | Cancelable, FilePosition, |
18 | }; | 18 | }; |
19 | 19 | ||
20 | #[derive(Debug)] | 20 | #[derive(Debug)] |
@@ -29,21 +29,21 @@ pub struct CompletionItem { | |||
29 | 29 | ||
30 | pub(crate) fn resolve_based_completion( | 30 | pub(crate) fn resolve_based_completion( |
31 | db: &db::RootDatabase, | 31 | db: &db::RootDatabase, |
32 | file_id: FileId, | 32 | position: FilePosition, |
33 | offset: TextUnit, | ||
34 | ) -> Cancelable<Option<Vec<CompletionItem>>> { | 33 | ) -> Cancelable<Option<Vec<CompletionItem>>> { |
35 | let source_root_id = db.file_source_root(file_id); | 34 | let source_root_id = db.file_source_root(position.file_id); |
36 | let file = db.file_syntax(file_id); | 35 | let file = db.file_syntax(position.file_id); |
37 | let module_tree = db.module_tree(source_root_id)?; | 36 | let module_tree = db.module_tree(source_root_id)?; |
38 | let module_id = match module_tree.any_module_for_file(file_id) { | 37 | let module_id = match module_tree.any_module_for_source(ModuleSource::File(position.file_id)) { |
39 | None => return Ok(None), | 38 | None => return Ok(None), |
40 | Some(it) => it, | 39 | Some(it) => it, |
41 | }; | 40 | }; |
42 | let file = { | 41 | let file = { |
43 | let edit = AtomEdit::insert(offset, "intellijRulezz".to_string()); | 42 | let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string()); |
44 | file.reparse(&edit) | 43 | file.reparse(&edit) |
45 | }; | 44 | }; |
46 | let target_module_id = match find_target_module(&module_tree, module_id, &file, offset) { | 45 | let target_module_id = match find_target_module(&module_tree, module_id, &file, position.offset) |
46 | { | ||
47 | None => return Ok(None), | 47 | None => return Ok(None), |
48 | Some(it) => it, | 48 | Some(it) => it, |
49 | }; | 49 | }; |
@@ -99,18 +99,17 @@ fn crate_path(name_ref: ast::NameRef) -> Option<Vec<ast::NameRef>> { | |||
99 | 99 | ||
100 | pub(crate) fn scope_completion( | 100 | pub(crate) fn scope_completion( |
101 | db: &db::RootDatabase, | 101 | db: &db::RootDatabase, |
102 | file_id: FileId, | 102 | position: FilePosition, |
103 | offset: TextUnit, | ||
104 | ) -> Option<Vec<CompletionItem>> { | 103 | ) -> Option<Vec<CompletionItem>> { |
105 | let original_file = db.file_syntax(file_id); | 104 | let original_file = db.file_syntax(position.file_id); |
106 | // Insert a fake ident to get a valid parse tree | 105 | // Insert a fake ident to get a valid parse tree |
107 | let file = { | 106 | let file = { |
108 | let edit = AtomEdit::insert(offset, "intellijRulezz".to_string()); | 107 | let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string()); |
109 | original_file.reparse(&edit) | 108 | original_file.reparse(&edit) |
110 | }; | 109 | }; |
111 | let mut has_completions = false; | 110 | let mut has_completions = false; |
112 | let mut res = Vec::new(); | 111 | let mut res = Vec::new(); |
113 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) { | 112 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) { |
114 | has_completions = true; | 113 | has_completions = true; |
115 | complete_name_ref(&file, name_ref, &mut res); | 114 | complete_name_ref(&file, name_ref, &mut res); |
116 | // special case, `trait T { fn foo(i_am_a_name_ref) {} }` | 115 | // special case, `trait T { fn foo(i_am_a_name_ref) {} }` |
@@ -129,7 +128,7 @@ pub(crate) fn scope_completion( | |||
129 | _ => (), | 128 | _ => (), |
130 | } | 129 | } |
131 | } | 130 | } |
132 | if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) { | 131 | if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) { |
133 | if is_node::<ast::Param>(name.syntax()) { | 132 | if is_node::<ast::Param>(name.syntax()) { |
134 | has_completions = true; | 133 | has_completions = true; |
135 | param_completions(name.syntax(), &mut res); | 134 | param_completions(name.syntax(), &mut res); |
@@ -383,7 +382,7 @@ mod tests { | |||
383 | 382 | ||
384 | fn check_scope_completion(code: &str, expected_completions: &str) { | 383 | fn check_scope_completion(code: &str, expected_completions: &str) { |
385 | let (analysis, position) = single_file_with_position(code); | 384 | let (analysis, position) = single_file_with_position(code); |
386 | let completions = scope_completion(&analysis.imp.db, position.file_id, position.offset) | 385 | let completions = scope_completion(&analysis.imp.db, position) |
387 | .unwrap() | 386 | .unwrap() |
388 | .into_iter() | 387 | .into_iter() |
389 | .filter(|c| c.snippet.is_none()) | 388 | .filter(|c| c.snippet.is_none()) |
@@ -393,7 +392,7 @@ mod tests { | |||
393 | 392 | ||
394 | fn check_snippet_completion(code: &str, expected_completions: &str) { | 393 | fn check_snippet_completion(code: &str, expected_completions: &str) { |
395 | let (analysis, position) = single_file_with_position(code); | 394 | let (analysis, position) = single_file_with_position(code); |
396 | let completions = scope_completion(&analysis.imp.db, position.file_id, position.offset) | 395 | let completions = scope_completion(&analysis.imp.db, position) |
397 | .unwrap() | 396 | .unwrap() |
398 | .into_iter() | 397 | .into_iter() |
399 | .filter(|c| c.snippet.is_some()) | 398 | .filter(|c| c.snippet.is_some()) |
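The hunks above switch both completion entry points from a separate file_id/offset pair to the single `FilePosition` value introduced later in this commit (see the lib.rs hunk below). A rough usage sketch, assuming the crate's `mock_analysis` test helpers are visible to the caller:

    use ra_analysis::mock_analysis::single_file_with_position;
    use ra_analysis::{Analysis, FilePosition};

    fn completion_demo() {
        // The `<|>` marker in the fixture encodes the cursor offset; the helper
        // returns a ready-made FilePosition alongside the Analysis instance.
        let (analysis, position): (Analysis, FilePosition) =
            single_file_with_position("fn main() { let foo = 92; fo<|> }");
        // One value now carries both the file and the offset.
        let completions = analysis.completions(position).unwrap();
        assert!(completions.is_some());
    }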
diff --git a/crates/ra_analysis/src/descriptors/mod.rs b/crates/ra_analysis/src/descriptors/mod.rs
index eaeef54c1..56bde3849 100644
--- a/crates/ra_analysis/src/descriptors/mod.rs
+++ b/crates/ra_analysis/src/descriptors/mod.rs
@@ -5,16 +5,16 @@ use std::sync::Arc; | |||
5 | 5 | ||
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | ast::{self, AstNode, FnDefNode}, | 7 | ast::{self, AstNode, FnDefNode}, |
8 | SmolStr, TextRange, | 8 | TextRange, |
9 | }; | 9 | }; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | db::SyntaxDatabase, | 12 | db::SyntaxDatabase, |
13 | descriptors::function::{resolve_local_name, FnId, FnScopes}, | 13 | descriptors::function::{resolve_local_name, FnId, FnScopes}, |
14 | descriptors::module::{ModuleId, ModuleScope, ModuleTree}, | 14 | descriptors::module::{ModuleId, ModuleScope, ModuleTree, ModuleSource}, |
15 | input::SourceRootId, | 15 | input::SourceRootId, |
16 | syntax_ptr::LocalSyntaxPtr, | 16 | syntax_ptr::LocalSyntaxPtr, |
17 | Cancelable, FileId, | 17 | Cancelable, |
18 | }; | 18 | }; |
19 | 19 | ||
20 | salsa::query_group! { | 20 | salsa::query_group! { |
@@ -23,7 +23,7 @@ salsa::query_group! { | |||
23 | type ModuleTreeQuery; | 23 | type ModuleTreeQuery; |
24 | use fn module::imp::module_tree; | 24 | use fn module::imp::module_tree; |
25 | } | 25 | } |
26 | fn submodules(file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> { | 26 | fn submodules(source: ModuleSource) -> Cancelable<Arc<Vec<module::imp::Submodule>>> { |
27 | type SubmodulesQuery; | 27 | type SubmodulesQuery; |
28 | use fn module::imp::submodules; | 28 | use fn module::imp::submodules; |
29 | } | 29 | } |
diff --git a/crates/ra_analysis/src/descriptors/module/imp.rs b/crates/ra_analysis/src/descriptors/module/imp.rs
index 3a010ecf5..b3b1f1f21 100644
--- a/crates/ra_analysis/src/descriptors/module/imp.rs
+++ b/crates/ra_analysis/src/descriptors/module/imp.rs
@@ -19,25 +19,66 @@ use super::{ | |||
19 | ModuleTree, Problem, | 19 | ModuleTree, Problem, |
20 | }; | 20 | }; |
21 | 21 | ||
22 | #[derive(Clone, Hash, PartialEq, Eq, Debug)] | ||
23 | pub(crate) enum Submodule { | ||
24 | Declaration(SmolStr), | ||
25 | Definition(SmolStr, ModuleSource), | ||
26 | } | ||
27 | |||
28 | impl Submodule { | ||
29 | fn name(&self) -> &SmolStr { | ||
30 | match self { | ||
31 | Submodule::Declaration(name) => name, | ||
32 | Submodule::Definition(name, _) => name, | ||
33 | } | ||
34 | } | ||
35 | } | ||
36 | |||
22 | pub(crate) fn submodules( | 37 | pub(crate) fn submodules( |
23 | db: &impl DescriptorDatabase, | 38 | db: &impl DescriptorDatabase, |
24 | file_id: FileId, | 39 | source: ModuleSource, |
25 | ) -> Cancelable<Arc<Vec<SmolStr>>> { | 40 | ) -> Cancelable<Arc<Vec<Submodule>>> { |
26 | db::check_canceled(db)?; | 41 | db::check_canceled(db)?; |
27 | let file = db.file_syntax(file_id); | 42 | let file_id = source.file_id(); |
28 | let root = file.ast(); | 43 | let submodules = match source.resolve(db) { |
29 | let submodules = modules(root).map(|(name, _)| name).collect(); | 44 | ModuleSourceNode::Root(it) => collect_submodules(file_id, it.ast()), |
30 | Ok(Arc::new(submodules)) | 45 | ModuleSourceNode::Inline(it) => it |
46 | .ast() | ||
47 | .item_list() | ||
48 | .map(|it| collect_submodules(file_id, it)) | ||
49 | .unwrap_or_else(Vec::new), | ||
50 | }; | ||
51 | return Ok(Arc::new(submodules)); | ||
52 | |||
53 | fn collect_submodules<'a>( | ||
54 | file_id: FileId, | ||
55 | root: impl ast::ModuleItemOwner<'a>, | ||
56 | ) -> Vec<Submodule> { | ||
57 | modules(root) | ||
58 | .map(|(name, m)| { | ||
59 | if m.has_semi() { | ||
60 | Submodule::Declaration(name) | ||
61 | } else { | ||
62 | let src = ModuleSource::new_inline(file_id, m); | ||
63 | Submodule::Definition(name, src) | ||
64 | } | ||
65 | }) | ||
66 | .collect() | ||
67 | } | ||
31 | } | 68 | } |
32 | 69 | ||
33 | pub(crate) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast::Module<'_>)> { | 70 | pub(crate) fn modules<'a>( |
34 | root.modules().filter_map(|module| { | 71 | root: impl ast::ModuleItemOwner<'a>, |
35 | let name = module.name()?.text(); | 72 | ) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> { |
36 | if !module.has_semi() { | 73 | root.items() |
37 | return None; | 74 | .filter_map(|item| match item { |
38 | } | 75 | ast::ModuleItem::Module(m) => Some(m), |
39 | Some((name, module)) | 76 | _ => None, |
40 | }) | 77 | }) |
78 | .filter_map(|module| { | ||
79 | let name = module.name()?.text(); | ||
80 | Some((name, module)) | ||
81 | }) | ||
41 | } | 82 | } |
42 | 83 | ||
43 | pub(crate) fn module_scope( | 84 | pub(crate) fn module_scope( |
@@ -66,11 +107,6 @@ pub(crate) fn module_tree( | |||
66 | Ok(Arc::new(res)) | 107 | Ok(Arc::new(res)) |
67 | } | 108 | } |
68 | 109 | ||
69 | #[derive(Clone, Hash, PartialEq, Eq, Debug)] | ||
70 | pub struct Submodule { | ||
71 | pub name: SmolStr, | ||
72 | } | ||
73 | |||
74 | fn create_module_tree<'a>( | 110 | fn create_module_tree<'a>( |
75 | db: &impl DescriptorDatabase, | 111 | db: &impl DescriptorDatabase, |
76 | source_root: SourceRootId, | 112 | source_root: SourceRootId, |
@@ -85,7 +121,8 @@ fn create_module_tree<'a>( | |||
85 | 121 | ||
86 | let source_root = db.source_root(source_root); | 122 | let source_root = db.source_root(source_root); |
87 | for &file_id in source_root.files.iter() { | 123 | for &file_id in source_root.files.iter() { |
88 | if visited.contains(&file_id) { | 124 | let source = ModuleSource::File(file_id); |
125 | if visited.contains(&source) { | ||
89 | continue; // TODO: use explicit crate_roots here | 126 | continue; // TODO: use explicit crate_roots here |
90 | } | 127 | } |
91 | assert!(!roots.contains_key(&file_id)); | 128 | assert!(!roots.contains_key(&file_id)); |
@@ -96,7 +133,7 @@ fn create_module_tree<'a>( | |||
96 | &mut visited, | 133 | &mut visited, |
97 | &mut roots, | 134 | &mut roots, |
98 | None, | 135 | None, |
99 | file_id, | 136 | source, |
100 | )?; | 137 | )?; |
101 | roots.insert(file_id, module_id); | 138 | roots.insert(file_id, module_id); |
102 | } | 139 | } |
@@ -107,36 +144,63 @@ fn build_subtree( | |||
107 | db: &impl DescriptorDatabase, | 144 | db: &impl DescriptorDatabase, |
108 | source_root: &SourceRoot, | 145 | source_root: &SourceRoot, |
109 | tree: &mut ModuleTree, | 146 | tree: &mut ModuleTree, |
110 | visited: &mut FxHashSet<FileId>, | 147 | visited: &mut FxHashSet<ModuleSource>, |
111 | roots: &mut FxHashMap<FileId, ModuleId>, | 148 | roots: &mut FxHashMap<FileId, ModuleId>, |
112 | parent: Option<LinkId>, | 149 | parent: Option<LinkId>, |
113 | file_id: FileId, | 150 | source: ModuleSource, |
114 | ) -> Cancelable<ModuleId> { | 151 | ) -> Cancelable<ModuleId> { |
115 | visited.insert(file_id); | 152 | visited.insert(source); |
116 | let id = tree.push_mod(ModuleData { | 153 | let id = tree.push_mod(ModuleData { |
117 | source: ModuleSource::File(file_id), | 154 | source, |
118 | parent, | 155 | parent, |
119 | children: Vec::new(), | 156 | children: Vec::new(), |
120 | }); | 157 | }); |
121 | for name in db.submodules(file_id)?.iter() { | 158 | for sub in db.submodules(source)?.iter() { |
122 | let (points_to, problem) = resolve_submodule(file_id, name, &source_root.file_resolver); | ||
123 | let link = tree.push_link(LinkData { | 159 | let link = tree.push_link(LinkData { |
124 | name: name.clone(), | 160 | name: sub.name().clone(), |
125 | owner: id, | 161 | owner: id, |
126 | points_to: Vec::new(), | 162 | points_to: Vec::new(), |
127 | problem: None, | 163 | problem: None, |
128 | }); | 164 | }); |
129 | 165 | ||
130 | let points_to = points_to | 166 | let (points_to, problem) = match sub { |
131 | .into_iter() | 167 | Submodule::Declaration(name) => { |
132 | .map(|file_id| match roots.remove(&file_id) { | 168 | let (points_to, problem) = |
133 | Some(module_id) => { | 169 | resolve_submodule(source, &name, &source_root.file_resolver); |
134 | tree.module_mut(module_id).parent = Some(link); | 170 | let points_to = points_to |
135 | Ok(module_id) | 171 | .into_iter() |
136 | } | 172 | .map(|file_id| match roots.remove(&file_id) { |
137 | None => build_subtree(db, source_root, tree, visited, roots, Some(link), file_id), | 173 | Some(module_id) => { |
138 | }) | 174 | tree.module_mut(module_id).parent = Some(link); |
139 | .collect::<Cancelable<Vec<_>>>()?; | 175 | Ok(module_id) |
176 | } | ||
177 | None => build_subtree( | ||
178 | db, | ||
179 | source_root, | ||
180 | tree, | ||
181 | visited, | ||
182 | roots, | ||
183 | Some(link), | ||
184 | ModuleSource::File(file_id), | ||
185 | ), | ||
186 | }) | ||
187 | .collect::<Cancelable<Vec<_>>>()?; | ||
188 | (points_to, problem) | ||
189 | } | ||
190 | Submodule::Definition(_name, submodule_source) => { | ||
191 | let points_to = build_subtree( | ||
192 | db, | ||
193 | source_root, | ||
194 | tree, | ||
195 | visited, | ||
196 | roots, | ||
197 | Some(link), | ||
198 | *submodule_source, | ||
199 | )?; | ||
200 | (vec![points_to], None) | ||
201 | } | ||
202 | }; | ||
203 | |||
140 | tree.link_mut(link).points_to = points_to; | 204 | tree.link_mut(link).points_to = points_to; |
141 | tree.link_mut(link).problem = problem; | 205 | tree.link_mut(link).problem = problem; |
142 | } | 206 | } |
@@ -144,10 +208,17 @@ fn build_subtree( | |||
144 | } | 208 | } |
145 | 209 | ||
146 | fn resolve_submodule( | 210 | fn resolve_submodule( |
147 | file_id: FileId, | 211 | source: ModuleSource, |
148 | name: &SmolStr, | 212 | name: &SmolStr, |
149 | file_resolver: &FileResolverImp, | 213 | file_resolver: &FileResolverImp, |
150 | ) -> (Vec<FileId>, Option<Problem>) { | 214 | ) -> (Vec<FileId>, Option<Problem>) { |
215 | let file_id = match source { | ||
216 | ModuleSource::File(it) => it, | ||
217 | ModuleSource::Inline(..) => { | ||
218 | // TODO | ||
219 | return (Vec::new(), None); | ||
220 | } | ||
221 | }; | ||
151 | let mod_name = file_resolver.file_stem(file_id); | 222 | let mod_name = file_resolver.file_stem(file_id); |
152 | let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; | 223 | let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; |
153 | 224 | ||
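`Submodule` now distinguishes a declaration (`mod foo;`), which still has to be resolved to a file on disk, from an inline definition (`mod foo { ... }`), which already carries its `ModuleSource`. A minimal crate-internal sketch of how the two variants are meant to be consumed (the printing is illustrative only):

    fn describe(sub: &Submodule) {
        match sub {
            // `mod foo;` - resolved against the file system via resolve_submodule.
            Submodule::Declaration(name) => println!("declared: {}", name.as_str()),
            // `mod foo { ... }` - build_subtree recurses straight into the stored
            // ModuleSource, so no file lookup is needed.
            Submodule::Definition(name, _source) => {
                println!("defined inline: {}", name.as_str())
            }
        }
    }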
diff --git a/crates/ra_analysis/src/descriptors/module/mod.rs b/crates/ra_analysis/src/descriptors/module/mod.rs
index e22489fc1..3d799ba05 100644
--- a/crates/ra_analysis/src/descriptors/module/mod.rs
+++ b/crates/ra_analysis/src/descriptors/module/mod.rs
@@ -25,17 +25,17 @@ pub(crate) struct ModuleTree { | |||
25 | } | 25 | } |
26 | 26 | ||
27 | impl ModuleTree { | 27 | impl ModuleTree { |
28 | pub(crate) fn modules_for_file(&self, file_id: FileId) -> Vec<ModuleId> { | 28 | pub(crate) fn modules_for_source(&self, source: ModuleSource) -> Vec<ModuleId> { |
29 | self.mods | 29 | self.mods |
30 | .iter() | 30 | .iter() |
31 | .enumerate() | 31 | .enumerate() |
32 | .filter(|(_idx, it)| it.source.is_file(file_id)) | 32 | .filter(|(_idx, it)| it.source == source) |
33 | .map(|(idx, _)| ModuleId(idx as u32)) | 33 | .map(|(idx, _)| ModuleId(idx as u32)) |
34 | .collect() | 34 | .collect() |
35 | } | 35 | } |
36 | 36 | ||
37 | pub(crate) fn any_module_for_file(&self, file_id: FileId) -> Option<ModuleId> { | 37 | pub(crate) fn any_module_for_source(&self, source: ModuleSource) -> Option<ModuleId> { |
38 | self.modules_for_file(file_id).pop() | 38 | self.modules_for_source(source).pop() |
39 | } | 39 | } |
40 | } | 40 | } |
41 | 41 | ||
@@ -142,9 +142,7 @@ impl LinkId { | |||
142 | .1; | 142 | .1; |
143 | ast.into() | 143 | ast.into() |
144 | } | 144 | } |
145 | ModuleSourceNode::Inline(..) => { | 145 | ModuleSourceNode::Inline(it) => it, |
146 | unimplemented!("https://github.com/rust-analyzer/rust-analyzer/issues/181") | ||
147 | } | ||
148 | } | 146 | } |
149 | } | 147 | } |
150 | } | 148 | } |
@@ -157,6 +155,12 @@ struct ModuleData { | |||
157 | } | 155 | } |
158 | 156 | ||
159 | impl ModuleSource { | 157 | impl ModuleSource { |
158 | pub(crate) fn new_inline(file_id: FileId, module: ast::Module) -> ModuleSource { | ||
159 | assert!(!module.has_semi()); | ||
160 | let ptr = SyntaxPtr::new(file_id, module.syntax()); | ||
161 | ModuleSource::Inline(ptr) | ||
162 | } | ||
163 | |||
160 | pub(crate) fn as_file(self) -> Option<FileId> { | 164 | pub(crate) fn as_file(self) -> Option<FileId> { |
161 | match self { | 165 | match self { |
162 | ModuleSource::File(f) => Some(f), | 166 | ModuleSource::File(f) => Some(f), |
@@ -164,6 +168,13 @@ impl ModuleSource { | |||
164 | } | 168 | } |
165 | } | 169 | } |
166 | 170 | ||
171 | pub(crate) fn file_id(self) -> FileId { | ||
172 | match self { | ||
173 | ModuleSource::File(f) => f, | ||
174 | ModuleSource::Inline(ptr) => ptr.file_id(), | ||
175 | } | ||
176 | } | ||
177 | |||
167 | fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode { | 178 | fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode { |
168 | match self { | 179 | match self { |
169 | ModuleSource::File(file_id) => { | 180 | ModuleSource::File(file_id) => { |
@@ -178,10 +189,6 @@ impl ModuleSource { | |||
178 | } | 189 | } |
179 | } | 190 | } |
180 | } | 191 | } |
181 | |||
182 | fn is_file(self, file_id: FileId) -> bool { | ||
183 | self.as_file() == Some(file_id) | ||
184 | } | ||
185 | } | 192 | } |
186 | 193 | ||
187 | #[derive(Hash, Debug, PartialEq, Eq)] | 194 | #[derive(Hash, Debug, PartialEq, Eq)] |
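Together, `new_inline` and `file_id` let the module tree treat inline modules as first-class sources while still answering which file a module lives in. An illustrative crate-internal helper built only from the two methods added above:

    fn inline_source(file_id: FileId, module: ast::Module) -> ModuleSource {
        // new_inline asserts that the module has a body, i.e. `mod foo { ... }`
        // rather than `mod foo;`.
        let source = ModuleSource::new_inline(file_id, module);
        // Inline sources remember their file through the stored SyntaxPtr.
        assert_eq!(source.file_id(), file_id);
        source
    }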
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index 4f337d163..f2482559f 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -27,7 +27,7 @@ use crate::{ | |||
27 | input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE}, | 27 | input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE}, |
28 | symbol_index::SymbolIndex, | 28 | symbol_index::SymbolIndex, |
29 | AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver, | 29 | AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver, |
30 | FileSystemEdit, Position, Query, SourceChange, SourceFileEdit, | 30 | FileSystemEdit, FilePosition, Query, SourceChange, SourceFileEdit, |
31 | }; | 31 | }; |
32 | 32 | ||
33 | #[derive(Clone, Debug)] | 33 | #[derive(Clone, Debug)] |
@@ -220,27 +220,29 @@ impl AnalysisImpl { | |||
220 | let source_root = self.db.file_source_root(file_id); | 220 | let source_root = self.db.file_source_root(file_id); |
221 | self.db.module_tree(source_root) | 221 | self.db.module_tree(source_root) |
222 | } | 222 | } |
223 | pub fn parent_module(&self, file_id: FileId) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 223 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> { |
224 | let module_tree = self.module_tree(file_id)?; | 224 | let module_tree = self.module_tree(position.file_id)?; |
225 | let file = self.db.file_syntax(position.file_id); | ||
226 | let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) | ||
227 | { | ||
228 | Some(m) if !m.has_semi() => ModuleSource::new_inline(position.file_id, m), | ||
229 | _ => ModuleSource::File(position.file_id), | ||
230 | }; | ||
225 | 231 | ||
226 | let res = module_tree | 232 | let res = module_tree |
227 | .modules_for_file(file_id) | 233 | .modules_for_source(module_source) |
228 | .into_iter() | 234 | .into_iter() |
229 | .filter_map(|module_id| { | 235 | .filter_map(|module_id| { |
230 | let link = module_id.parent_link(&module_tree)?; | 236 | let link = module_id.parent_link(&module_tree)?; |
231 | let file_id = match link.owner(&module_tree).source(&module_tree) { | 237 | let file_id = link.owner(&module_tree).source(&module_tree).file_id(); |
232 | ModuleSource::File(file_id) => file_id, | ||
233 | ModuleSource::Inline(..) => { | ||
234 | //TODO: https://github.com/rust-analyzer/rust-analyzer/issues/181 | ||
235 | return None; | ||
236 | } | ||
237 | }; | ||
238 | let decl = link.bind_source(&module_tree, &*self.db); | 238 | let decl = link.bind_source(&module_tree, &*self.db); |
239 | let decl = decl.ast(); | 239 | let decl = decl.ast(); |
240 | 240 | ||
241 | let decl_name = decl.name().unwrap(); | ||
242 | |||
241 | let sym = FileSymbol { | 243 | let sym = FileSymbol { |
242 | name: decl.name().unwrap().text(), | 244 | name: decl_name.text(), |
243 | node_range: decl.syntax().range(), | 245 | node_range: decl_name.syntax().range(), |
244 | kind: MODULE, | 246 | kind: MODULE, |
245 | }; | 247 | }; |
246 | Some((file_id, sym)) | 248 | Some((file_id, sym)) |
@@ -252,7 +254,7 @@ impl AnalysisImpl { | |||
252 | let module_tree = self.module_tree(file_id)?; | 254 | let module_tree = self.module_tree(file_id)?; |
253 | let crate_graph = self.db.crate_graph(); | 255 | let crate_graph = self.db.crate_graph(); |
254 | let res = module_tree | 256 | let res = module_tree |
255 | .modules_for_file(file_id) | 257 | .modules_for_source(ModuleSource::File(file_id)) |
256 | .into_iter() | 258 | .into_iter() |
257 | .map(|it| it.root(&module_tree)) | 259 | .map(|it| it.root(&module_tree)) |
258 | .filter_map(|it| it.source(&module_tree).as_file()) | 260 | .filter_map(|it| it.source(&module_tree).as_file()) |
@@ -264,18 +266,14 @@ impl AnalysisImpl { | |||
264 | pub fn crate_root(&self, crate_id: CrateId) -> FileId { | 266 | pub fn crate_root(&self, crate_id: CrateId) -> FileId { |
265 | self.db.crate_graph().crate_roots[&crate_id] | 267 | self.db.crate_graph().crate_roots[&crate_id] |
266 | } | 268 | } |
267 | pub fn completions( | 269 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { |
268 | &self, | ||
269 | file_id: FileId, | ||
270 | offset: TextUnit, | ||
271 | ) -> Cancelable<Option<Vec<CompletionItem>>> { | ||
272 | let mut res = Vec::new(); | 270 | let mut res = Vec::new(); |
273 | let mut has_completions = false; | 271 | let mut has_completions = false; |
274 | if let Some(scope_based) = scope_completion(&self.db, file_id, offset) { | 272 | if let Some(scope_based) = scope_completion(&self.db, position) { |
275 | res.extend(scope_based); | 273 | res.extend(scope_based); |
276 | has_completions = true; | 274 | has_completions = true; |
277 | } | 275 | } |
278 | if let Some(scope_based) = resolve_based_completion(&self.db, file_id, offset)? { | 276 | if let Some(scope_based) = resolve_based_completion(&self.db, position)? { |
279 | res.extend(scope_based); | 277 | res.extend(scope_based); |
280 | has_completions = true; | 278 | has_completions = true; |
281 | } | 279 | } |
@@ -284,18 +282,19 @@ impl AnalysisImpl { | |||
284 | } | 282 | } |
285 | pub fn approximately_resolve_symbol( | 283 | pub fn approximately_resolve_symbol( |
286 | &self, | 284 | &self, |
287 | file_id: FileId, | 285 | position: FilePosition, |
288 | offset: TextUnit, | ||
289 | ) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 286 | ) -> Cancelable<Vec<(FileId, FileSymbol)>> { |
290 | let module_tree = self.module_tree(file_id)?; | 287 | let module_tree = self.module_tree(position.file_id)?; |
291 | let file = self.db.file_syntax(file_id); | 288 | let file = self.db.file_syntax(position.file_id); |
292 | let syntax = file.syntax(); | 289 | let syntax = file.syntax(); |
293 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) { | 290 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { |
294 | // First try to resolve the symbol locally | 291 | // First try to resolve the symbol locally |
295 | return if let Some((name, range)) = resolve_local_name(&self.db, file_id, name_ref) { | 292 | return if let Some((name, range)) = |
293 | resolve_local_name(&self.db, position.file_id, name_ref) | ||
294 | { | ||
296 | let mut vec = vec![]; | 295 | let mut vec = vec![]; |
297 | vec.push(( | 296 | vec.push(( |
298 | file_id, | 297 | position.file_id, |
299 | FileSymbol { | 298 | FileSymbol { |
300 | name, | 299 | name, |
301 | node_range: range, | 300 | node_range: range, |
@@ -308,10 +307,10 @@ impl AnalysisImpl { | |||
308 | self.index_resolve(name_ref) | 307 | self.index_resolve(name_ref) |
309 | }; | 308 | }; |
310 | } | 309 | } |
311 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) { | 310 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { |
312 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { | 311 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { |
313 | if module.has_semi() { | 312 | if module.has_semi() { |
314 | let file_ids = self.resolve_module(&*module_tree, file_id, module); | 313 | let file_ids = self.resolve_module(&*module_tree, position.file_id, module); |
315 | 314 | ||
316 | let res = file_ids | 315 | let res = file_ids |
317 | .into_iter() | 316 | .into_iter() |
@@ -336,16 +335,17 @@ impl AnalysisImpl { | |||
336 | Ok(vec![]) | 335 | Ok(vec![]) |
337 | } | 336 | } |
338 | 337 | ||
339 | pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit) -> Vec<(FileId, TextRange)> { | 338 | pub fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> { |
340 | let file = self.db.file_syntax(file_id); | 339 | let file = self.db.file_syntax(position.file_id); |
341 | let syntax = file.syntax(); | 340 | let syntax = file.syntax(); |
342 | 341 | ||
343 | // Find the binding associated with the offset | 342 | // Find the binding associated with the offset |
344 | let maybe_binding = find_node_at_offset::<ast::BindPat>(syntax, offset).or_else(|| { | 343 | let maybe_binding = |
345 | let name_ref = find_node_at_offset::<ast::NameRef>(syntax, offset)?; | 344 | find_node_at_offset::<ast::BindPat>(syntax, position.offset).or_else(|| { |
346 | let resolved = resolve_local_name(&self.db, file_id, name_ref)?; | 345 | let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?; |
347 | find_node_at_offset::<ast::BindPat>(syntax, resolved.1.end()) | 346 | let resolved = resolve_local_name(&self.db, position.file_id, name_ref)?; |
348 | }); | 347 | find_node_at_offset::<ast::BindPat>(syntax, resolved.1.end()) |
348 | }); | ||
349 | 349 | ||
350 | let binding = match maybe_binding { | 350 | let binding = match maybe_binding { |
351 | None => return Vec::new(), | 351 | None => return Vec::new(), |
@@ -354,11 +354,11 @@ impl AnalysisImpl { | |||
354 | 354 | ||
355 | let decl = DeclarationDescriptor::new(binding); | 355 | let decl = DeclarationDescriptor::new(binding); |
356 | 356 | ||
357 | let mut ret = vec![(file_id, decl.range)]; | 357 | let mut ret = vec![(position.file_id, decl.range)]; |
358 | ret.extend( | 358 | ret.extend( |
359 | decl.find_all_refs() | 359 | decl.find_all_refs() |
360 | .into_iter() | 360 | .into_iter() |
361 | .map(|ref_desc| (file_id, ref_desc.range)), | 361 | .map(|ref_desc| (position.file_id, ref_desc.range)), |
362 | ); | 362 | ); |
363 | 363 | ||
364 | ret | 364 | ret |
@@ -376,7 +376,7 @@ impl AnalysisImpl { | |||
376 | fix: None, | 376 | fix: None, |
377 | }) | 377 | }) |
378 | .collect::<Vec<_>>(); | 378 | .collect::<Vec<_>>(); |
379 | if let Some(m) = module_tree.any_module_for_file(file_id) { | 379 | if let Some(m) = module_tree.any_module_for_source(ModuleSource::File(file_id)) { |
380 | for (name_node, problem) in m.problems(&module_tree, &*self.db) { | 380 | for (name_node, problem) in m.problems(&module_tree, &*self.db) { |
381 | let diag = match problem { | 381 | let diag = match problem { |
382 | Problem::UnresolvedModule { candidate } => { | 382 | Problem::UnresolvedModule { candidate } => { |
@@ -452,14 +452,13 @@ impl AnalysisImpl { | |||
452 | 452 | ||
453 | pub fn resolve_callable( | 453 | pub fn resolve_callable( |
454 | &self, | 454 | &self, |
455 | file_id: FileId, | 455 | position: FilePosition, |
456 | offset: TextUnit, | ||
457 | ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { | 456 | ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { |
458 | let file = self.db.file_syntax(file_id); | 457 | let file = self.db.file_syntax(position.file_id); |
459 | let syntax = file.syntax(); | 458 | let syntax = file.syntax(); |
460 | 459 | ||
461 | // Find the calling expression and it's NameRef | 460 | // Find the calling expression and it's NameRef |
462 | let calling_node = match FnCallNode::with_node(syntax, offset) { | 461 | let calling_node = match FnCallNode::with_node(syntax, position.offset) { |
463 | Some(node) => node, | 462 | Some(node) => node, |
464 | None => return Ok(None), | 463 | None => return Ok(None), |
465 | }; | 464 | }; |
@@ -494,7 +493,7 @@ impl AnalysisImpl { | |||
494 | if let Some(ref arg_list) = calling_node.arg_list() { | 493 | if let Some(ref arg_list) = calling_node.arg_list() { |
495 | let start = arg_list.syntax().range().start(); | 494 | let start = arg_list.syntax().range().start(); |
496 | 495 | ||
497 | let range_search = TextRange::from_to(start, offset); | 496 | let range_search = TextRange::from_to(start, position.offset); |
498 | let mut commas: usize = arg_list | 497 | let mut commas: usize = arg_list |
499 | .syntax() | 498 | .syntax() |
500 | .text() | 499 | .text() |
@@ -539,7 +538,7 @@ impl AnalysisImpl { | |||
539 | Some(name) => name.text(), | 538 | Some(name) => name.text(), |
540 | None => return Vec::new(), | 539 | None => return Vec::new(), |
541 | }; | 540 | }; |
542 | let module_id = match module_tree.any_module_for_file(file_id) { | 541 | let module_id = match module_tree.any_module_for_source(ModuleSource::File(file_id)) { |
543 | Some(id) => id, | 542 | Some(id) => id, |
544 | None => return Vec::new(), | 543 | None => return Vec::new(), |
545 | }; | 544 | }; |
@@ -563,7 +562,7 @@ impl SourceChange { | |||
563 | file_system_edits: vec![], | 562 | file_system_edits: vec![], |
564 | cursor_position: edit | 563 | cursor_position: edit |
565 | .cursor_position | 564 | .cursor_position |
566 | .map(|offset| Position { offset, file_id }), | 565 | .map(|offset| FilePosition { offset, file_id }), |
567 | } | 566 | } |
568 | } | 567 | } |
569 | } | 568 | } |
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 4e4c65f08..0ea9ebee7 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -119,18 +119,18 @@ impl AnalysisHost { | |||
119 | } | 119 | } |
120 | } | 120 | } |
121 | 121 | ||
122 | #[derive(Clone, Copy, Debug)] | ||
123 | pub struct FilePosition { | ||
124 | pub file_id: FileId, | ||
125 | pub offset: TextUnit, | ||
126 | } | ||
127 | |||
122 | #[derive(Debug)] | 128 | #[derive(Debug)] |
123 | pub struct SourceChange { | 129 | pub struct SourceChange { |
124 | pub label: String, | 130 | pub label: String, |
125 | pub source_file_edits: Vec<SourceFileEdit>, | 131 | pub source_file_edits: Vec<SourceFileEdit>, |
126 | pub file_system_edits: Vec<FileSystemEdit>, | 132 | pub file_system_edits: Vec<FileSystemEdit>, |
127 | pub cursor_position: Option<Position>, | 133 | pub cursor_position: Option<FilePosition>, |
128 | } | ||
129 | |||
130 | #[derive(Debug)] | ||
131 | pub struct Position { | ||
132 | pub file_id: FileId, | ||
133 | pub offset: TextUnit, | ||
134 | } | 134 | } |
135 | 135 | ||
136 | #[derive(Debug)] | 136 | #[derive(Debug)] |
@@ -224,18 +224,18 @@ impl Analysis { | |||
224 | let file = self.imp.file_syntax(file_id); | 224 | let file = self.imp.file_syntax(file_id); |
225 | SourceChange::from_local_edit(file_id, "join lines", ra_editor::join_lines(&file, range)) | 225 | SourceChange::from_local_edit(file_id, "join lines", ra_editor::join_lines(&file, range)) |
226 | } | 226 | } |
227 | pub fn on_enter(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> { | 227 | pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { |
228 | let file = self.imp.file_syntax(file_id); | 228 | let file = self.imp.file_syntax(position.file_id); |
229 | let edit = ra_editor::on_enter(&file, offset)?; | 229 | let edit = ra_editor::on_enter(&file, position.offset)?; |
230 | let res = SourceChange::from_local_edit(file_id, "on enter", edit); | 230 | let res = SourceChange::from_local_edit(position.file_id, "on enter", edit); |
231 | Some(res) | 231 | Some(res) |
232 | } | 232 | } |
233 | pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> { | 233 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { |
234 | let file = self.imp.file_syntax(file_id); | 234 | let file = self.imp.file_syntax(position.file_id); |
235 | Some(SourceChange::from_local_edit( | 235 | Some(SourceChange::from_local_edit( |
236 | file_id, | 236 | position.file_id, |
237 | "add semicolon", | 237 | "add semicolon", |
238 | ra_editor::on_eq_typed(&file, offset)?, | 238 | ra_editor::on_eq_typed(&file, position.offset)?, |
239 | )) | 239 | )) |
240 | } | 240 | } |
241 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | 241 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { |
@@ -251,20 +251,15 @@ impl Analysis { | |||
251 | } | 251 | } |
252 | pub fn approximately_resolve_symbol( | 252 | pub fn approximately_resolve_symbol( |
253 | &self, | 253 | &self, |
254 | file_id: FileId, | 254 | position: FilePosition, |
255 | offset: TextUnit, | ||
256 | ) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 255 | ) -> Cancelable<Vec<(FileId, FileSymbol)>> { |
257 | self.imp.approximately_resolve_symbol(file_id, offset) | 256 | self.imp.approximately_resolve_symbol(position) |
258 | } | 257 | } |
259 | pub fn find_all_refs( | 258 | pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> { |
260 | &self, | 259 | Ok(self.imp.find_all_refs(position)) |
261 | file_id: FileId, | ||
262 | offset: TextUnit, | ||
263 | ) -> Cancelable<Vec<(FileId, TextRange)>> { | ||
264 | Ok(self.imp.find_all_refs(file_id, offset)) | ||
265 | } | 260 | } |
266 | pub fn parent_module(&self, file_id: FileId) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 261 | pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> { |
267 | self.imp.parent_module(file_id) | 262 | self.imp.parent_module(position) |
268 | } | 263 | } |
269 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { | 264 | pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { |
270 | self.imp.crate_for(file_id) | 265 | self.imp.crate_for(file_id) |
@@ -280,12 +275,8 @@ impl Analysis { | |||
280 | let file = self.imp.file_syntax(file_id); | 275 | let file = self.imp.file_syntax(file_id); |
281 | Ok(ra_editor::highlight(&file)) | 276 | Ok(ra_editor::highlight(&file)) |
282 | } | 277 | } |
283 | pub fn completions( | 278 | pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { |
284 | &self, | 279 | self.imp.completions(position) |
285 | file_id: FileId, | ||
286 | offset: TextUnit, | ||
287 | ) -> Cancelable<Option<Vec<CompletionItem>>> { | ||
288 | self.imp.completions(file_id, offset) | ||
289 | } | 280 | } |
290 | pub fn assists(&self, file_id: FileId, range: TextRange) -> Cancelable<Vec<SourceChange>> { | 281 | pub fn assists(&self, file_id: FileId, range: TextRange) -> Cancelable<Vec<SourceChange>> { |
291 | Ok(self.imp.assists(file_id, range)) | 282 | Ok(self.imp.assists(file_id, range)) |
@@ -295,10 +286,9 @@ impl Analysis { | |||
295 | } | 286 | } |
296 | pub fn resolve_callable( | 287 | pub fn resolve_callable( |
297 | &self, | 288 | &self, |
298 | file_id: FileId, | 289 | position: FilePosition, |
299 | offset: TextUnit, | ||
300 | ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { | 290 | ) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { |
301 | self.imp.resolve_callable(file_id, offset) | 291 | self.imp.resolve_callable(position) |
302 | } | 292 | } |
303 | } | 293 | } |
304 | 294 | ||
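`FilePosition` is now the public currency for "a cursor position in a file" across the analysis API. A call site that still starts from a raw `FileId` plus `TextUnit` offset simply wraps them, much as the LSP handlers further below do:

    use ra_analysis::{FileId, FilePosition};
    use ra_syntax::TextUnit;

    // Adapting an old (file_id, offset) call site to the new API surface,
    // e.g. before calling analysis.on_enter(..) or analysis.completions(..).
    fn to_position(file_id: FileId, offset: TextUnit) -> FilePosition {
        FilePosition { file_id, offset }
    }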
diff --git a/crates/ra_analysis/src/mock_analysis.rs b/crates/ra_analysis/src/mock_analysis.rs
index a7134a0e6..8e8f969f4 100644
--- a/crates/ra_analysis/src/mock_analysis.rs
+++ b/crates/ra_analysis/src/mock_analysis.rs
@@ -1,16 +1,9 @@ | |||
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use ra_syntax::TextUnit; | ||
4 | use relative_path::{RelativePath, RelativePathBuf}; | 3 | use relative_path::{RelativePath, RelativePathBuf}; |
5 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; | 4 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; |
6 | 5 | ||
7 | use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FileResolver}; | 6 | use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FileResolver, FilePosition}; |
8 | |||
9 | #[derive(Debug)] | ||
10 | pub struct FilePosition { | ||
11 | pub file_id: FileId, | ||
12 | pub offset: TextUnit, | ||
13 | } | ||
14 | 7 | ||
15 | /// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis | 8 | /// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis |
16 | /// from a set of in-memory files. | 9 | /// from a set of in-memory files. |
diff --git a/crates/ra_analysis/src/syntax_ptr.rs b/crates/ra_analysis/src/syntax_ptr.rs
index 4db1529c2..4afb1fc93 100644
--- a/crates/ra_analysis/src/syntax_ptr.rs
+++ b/crates/ra_analysis/src/syntax_ptr.rs
@@ -22,6 +22,10 @@ impl SyntaxPtr { | |||
22 | let local = LocalSyntaxPtr::new(node); | 22 | let local = LocalSyntaxPtr::new(node); |
23 | SyntaxPtr { file_id, local } | 23 | SyntaxPtr { file_id, local } |
24 | } | 24 | } |
25 | |||
26 | pub(crate) fn file_id(self) -> FileId { | ||
27 | self.file_id | ||
28 | } | ||
25 | } | 29 | } |
26 | 30 | ||
27 | /// A pionter to a syntax node inside a file. | 31 | /// A pionter to a syntax node inside a file. |
diff --git a/crates/ra_analysis/tests/tests.rs b/crates/ra_analysis/tests/tests.rs
index c2754c8e4..c605d34f0 100644
--- a/crates/ra_analysis/tests/tests.rs
+++ b/crates/ra_analysis/tests/tests.rs
@@ -15,10 +15,7 @@ use ra_analysis::{ | |||
15 | 15 | ||
16 | fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) { | 16 | fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) { |
17 | let (analysis, position) = single_file_with_position(text); | 17 | let (analysis, position) = single_file_with_position(text); |
18 | analysis | 18 | analysis.resolve_callable(position).unwrap().unwrap() |
19 | .resolve_callable(position.file_id, position.offset) | ||
20 | .unwrap() | ||
21 | .unwrap() | ||
22 | } | 19 | } |
23 | 20 | ||
24 | #[test] | 21 | #[test] |
@@ -32,9 +29,7 @@ fn test_resolve_module() { | |||
32 | ", | 29 | ", |
33 | ); | 30 | ); |
34 | 31 | ||
35 | let symbols = analysis | 32 | let symbols = analysis.approximately_resolve_symbol(pos).unwrap(); |
36 | .approximately_resolve_symbol(pos.file_id, pos.offset) | ||
37 | .unwrap(); | ||
38 | assert_eq_dbg( | 33 | assert_eq_dbg( |
39 | r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#, | 34 | r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#, |
40 | &symbols, | 35 | &symbols, |
@@ -49,9 +44,7 @@ fn test_resolve_module() { | |||
49 | ", | 44 | ", |
50 | ); | 45 | ); |
51 | 46 | ||
52 | let symbols = analysis | 47 | let symbols = analysis.approximately_resolve_symbol(pos).unwrap(); |
53 | .approximately_resolve_symbol(pos.file_id, pos.offset) | ||
54 | .unwrap(); | ||
55 | assert_eq_dbg( | 48 | assert_eq_dbg( |
56 | r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#, | 49 | r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#, |
57 | &symbols, | 50 | &symbols, |
@@ -92,9 +85,28 @@ fn test_resolve_parent_module() { | |||
92 | <|>// empty | 85 | <|>// empty |
93 | ", | 86 | ", |
94 | ); | 87 | ); |
95 | let symbols = analysis.parent_module(pos.file_id).unwrap(); | 88 | let symbols = analysis.parent_module(pos).unwrap(); |
96 | assert_eq_dbg( | 89 | assert_eq_dbg( |
97 | r#"[(FileId(1), FileSymbol { name: "foo", node_range: [0; 8), kind: MODULE })]"#, | 90 | r#"[(FileId(1), FileSymbol { name: "foo", node_range: [4; 7), kind: MODULE })]"#, |
91 | &symbols, | ||
92 | ); | ||
93 | } | ||
94 | |||
95 | #[test] | ||
96 | fn test_resolve_parent_module_for_inline() { | ||
97 | let (analysis, pos) = analysis_and_position( | ||
98 | " | ||
99 | //- /lib.rs | ||
100 | mod foo { | ||
101 | mod bar { | ||
102 | mod baz { <|> } | ||
103 | } | ||
104 | } | ||
105 | ", | ||
106 | ); | ||
107 | let symbols = analysis.parent_module(pos).unwrap(); | ||
108 | assert_eq_dbg( | ||
109 | r#"[(FileId(1), FileSymbol { name: "bar", node_range: [18; 21), kind: MODULE })]"#, | ||
98 | &symbols, | 110 | &symbols, |
99 | ); | 111 | ); |
100 | } | 112 | } |
@@ -378,9 +390,7 @@ By default this method stops actor's `Context`."# | |||
378 | 390 | ||
379 | fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> { | 391 | fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> { |
380 | let (analysis, position) = single_file_with_position(text); | 392 | let (analysis, position) = single_file_with_position(text); |
381 | analysis | 393 | analysis.find_all_refs(position).unwrap() |
382 | .find_all_refs(position.file_id, position.offset) | ||
383 | .unwrap() | ||
384 | } | 394 | } |
385 | 395 | ||
386 | #[test] | 396 | #[test] |
@@ -435,10 +445,7 @@ fn test_complete_crate_path() { | |||
435 | use crate::Sp<|> | 445 | use crate::Sp<|> |
436 | ", | 446 | ", |
437 | ); | 447 | ); |
438 | let completions = analysis | 448 | let completions = analysis.completions(position).unwrap().unwrap(); |
439 | .completions(position.file_id, position.offset) | ||
440 | .unwrap() | ||
441 | .unwrap(); | ||
442 | assert_eq_dbg( | 449 | assert_eq_dbg( |
443 | r#"[CompletionItem { label: "foo", lookup: None, snippet: None }, | 450 | r#"[CompletionItem { label: "foo", lookup: None, snippet: None }, |
444 | CompletionItem { label: "Spam", lookup: None, snippet: None }]"#, | 451 | CompletionItem { label: "Spam", lookup: None, snippet: None }]"#, |
diff --git a/crates/ra_editor/src/code_actions.rs b/crates/ra_editor/src/code_actions.rs
index cadcd2720..ef6df0d53 100644
--- a/crates/ra_editor/src/code_actions.rs
+++ b/crates/ra_editor/src/code_actions.rs
@@ -103,35 +103,52 @@ pub fn introduce_variable<'a>( | |||
103 | ) -> Option<impl FnOnce() -> LocalEdit + 'a> { | 103 | ) -> Option<impl FnOnce() -> LocalEdit + 'a> { |
104 | let node = find_covering_node(file.syntax(), range); | 104 | let node = find_covering_node(file.syntax(), range); |
105 | let expr = node.ancestors().filter_map(ast::Expr::cast).next()?; | 105 | let expr = node.ancestors().filter_map(ast::Expr::cast).next()?; |
106 | let anchor_stmt = expr | 106 | |
107 | .syntax() | 107 | let anchor_stmt = ahchor_stmt(expr)?; |
108 | .ancestors() | 108 | let indent = anchor_stmt.prev_sibling()?; |
109 | .filter_map(ast::Stmt::cast) | ||
110 | .next()?; | ||
111 | let indent = anchor_stmt.syntax().prev_sibling()?; | ||
112 | if indent.kind() != WHITESPACE { | 109 | if indent.kind() != WHITESPACE { |
113 | return None; | 110 | return None; |
114 | } | 111 | } |
115 | Some(move || { | 112 | return Some(move || { |
116 | let mut buf = String::new(); | 113 | let mut buf = String::new(); |
117 | let mut edit = EditBuilder::new(); | 114 | let mut edit = EditBuilder::new(); |
118 | 115 | ||
119 | buf.push_str("let var_name = "); | 116 | buf.push_str("let var_name = "); |
120 | expr.syntax().text().push_to(&mut buf); | 117 | expr.syntax().text().push_to(&mut buf); |
121 | if expr.syntax().range().start() == anchor_stmt.syntax().range().start() { | 118 | if expr.syntax().range().start() == anchor_stmt.range().start() { |
122 | edit.replace(expr.syntax().range(), buf); | 119 | edit.replace(expr.syntax().range(), buf); |
123 | } else { | 120 | } else { |
124 | buf.push_str(";"); | 121 | buf.push_str(";"); |
125 | indent.text().push_to(&mut buf); | 122 | indent.text().push_to(&mut buf); |
126 | edit.replace(expr.syntax().range(), "var_name".to_string()); | 123 | edit.replace(expr.syntax().range(), "var_name".to_string()); |
127 | edit.insert(anchor_stmt.syntax().range().start(), buf); | 124 | edit.insert(anchor_stmt.range().start(), buf); |
128 | } | 125 | } |
129 | let cursor_position = anchor_stmt.syntax().range().start() + TextUnit::of_str("let "); | 126 | let cursor_position = anchor_stmt.range().start() + TextUnit::of_str("let "); |
130 | LocalEdit { | 127 | LocalEdit { |
131 | edit: edit.finish(), | 128 | edit: edit.finish(), |
132 | cursor_position: Some(cursor_position), | 129 | cursor_position: Some(cursor_position), |
133 | } | 130 | } |
134 | }) | 131 | }); |
132 | |||
133 | /// Statement or last in the block expression, which will follow | ||
134 | /// the freshly introduced var. | ||
135 | fn ahchor_stmt(expr: ast::Expr) -> Option<SyntaxNodeRef> { | ||
136 | expr.syntax().ancestors().find(|&node| { | ||
137 | if ast::Stmt::cast(node).is_some() { | ||
138 | return true; | ||
139 | } | ||
140 | if let Some(expr) = node | ||
141 | .parent() | ||
142 | .and_then(ast::Block::cast) | ||
143 | .and_then(|it| it.expr()) | ||
144 | { | ||
145 | if expr.syntax() == node { | ||
146 | return true; | ||
147 | } | ||
148 | } | ||
149 | false | ||
150 | }) | ||
151 | } | ||
135 | } | 152 | } |
136 | 153 | ||
137 | fn non_trivia_sibling(node: SyntaxNodeRef, direction: Direction) -> Option<SyntaxNodeRef> { | 154 | fn non_trivia_sibling(node: SyntaxNodeRef, direction: Direction) -> Option<SyntaxNodeRef> { |
@@ -207,6 +224,7 @@ fn foo() { | |||
207 | |file, range| introduce_variable(file, range).map(|f| f()), | 224 | |file, range| introduce_variable(file, range).map(|f| f()), |
208 | ); | 225 | ); |
209 | } | 226 | } |
227 | |||
210 | #[test] | 228 | #[test] |
211 | fn test_intrdoduce_var_expr_stmt() { | 229 | fn test_intrdoduce_var_expr_stmt() { |
212 | check_action_range( | 230 | check_action_range( |
@@ -222,4 +240,20 @@ fn foo() { | |||
222 | ); | 240 | ); |
223 | } | 241 | } |
224 | 242 | ||
243 | #[test] | ||
244 | fn test_intrdoduce_var_last_expr() { | ||
245 | check_action_range( | ||
246 | " | ||
247 | fn foo() { | ||
248 | bar(<|>1 + 1<|>) | ||
249 | }", | ||
250 | " | ||
251 | fn foo() { | ||
252 | let <|>var_name = 1 + 1; | ||
253 | bar(var_name) | ||
254 | }", | ||
255 | |file, range| introduce_variable(file, range).map(|f| f()), | ||
256 | ); | ||
257 | } | ||
258 | |||
225 | } | 259 | } |
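The new anchoring rule is what makes the `test_intrdoduce_var_last_expr` case pass: the anchor is the first ancestor that is either a statement or the tail expression of its enclosing block. A simplified restatement of that predicate (the function name is ours; the types come from `ra_syntax`):

    use ra_syntax::{
        ast::{self, AstNode},
        SyntaxNodeRef,
    };

    // True if `node` can anchor the freshly introduced `let`: either a
    // statement, or the trailing expression of a block.
    fn is_anchor(node: SyntaxNodeRef) -> bool {
        if ast::Stmt::cast(node).is_some() {
            return true;
        }
        node.parent()
            .and_then(ast::Block::cast)
            .and_then(|block| block.expr())
            .map_or(false, |tail| tail.syntax() == node)
    }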
diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs
index 84ffac2da..fa04f4b00 100644
--- a/crates/ra_lsp_server/src/conv.rs
+++ b/crates/ra_lsp_server/src/conv.rs
@@ -2,7 +2,7 @@ use languageserver_types::{ | |||
2 | Location, Position, Range, SymbolKind, TextDocumentEdit, TextDocumentIdentifier, | 2 | Location, Position, Range, SymbolKind, TextDocumentEdit, TextDocumentIdentifier, |
3 | TextDocumentItem, TextDocumentPositionParams, TextEdit, Url, VersionedTextDocumentIdentifier, | 3 | TextDocumentItem, TextDocumentPositionParams, TextEdit, Url, VersionedTextDocumentIdentifier, |
4 | }; | 4 | }; |
5 | use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileEdit}; | 5 | use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileEdit, FilePosition}; |
6 | use ra_editor::{AtomEdit, Edit, LineCol, LineIndex}; | 6 | use ra_editor::{AtomEdit, Edit, LineCol, LineIndex}; |
7 | use ra_syntax::{SyntaxKind, TextRange, TextUnit}; | 7 | use ra_syntax::{SyntaxKind, TextRange, TextUnit}; |
8 | 8 | ||
@@ -165,6 +165,17 @@ impl<'a> TryConvWith for &'a TextDocumentIdentifier { | |||
165 | } | 165 | } |
166 | } | 166 | } |
167 | 167 | ||
168 | impl<'a> TryConvWith for &'a TextDocumentPositionParams { | ||
169 | type Ctx = ServerWorld; | ||
170 | type Output = FilePosition; | ||
171 | fn try_conv_with(self, world: &ServerWorld) -> Result<FilePosition> { | ||
172 | let file_id = self.text_document.try_conv_with(world)?; | ||
173 | let line_index = world.analysis().file_line_index(file_id); | ||
174 | let offset = self.position.conv_with(&line_index); | ||
175 | Ok(FilePosition { file_id, offset }) | ||
176 | } | ||
177 | } | ||
178 | |||
168 | impl<T: TryConvWith> TryConvWith for Vec<T> { | 179 | impl<T: TryConvWith> TryConvWith for Vec<T> { |
169 | type Ctx = <T as TryConvWith>::Ctx; | 180 | type Ctx = <T as TryConvWith>::Ctx; |
170 | type Output = Vec<<T as TryConvWith>::Output>; | 181 | type Output = Vec<<T as TryConvWith>::Output>; |
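With this impl, a handler that receives `TextDocumentPositionParams` converts it to a `FilePosition` in one step, which is exactly what the rewritten handlers in the next file do. A hedged sketch of the call shape (the handler name is made up; it is assumed to live in main_loop/handlers.rs, where `TryConvWith`, `to_location`, `req`, `ServerWorld`, `Result`, and `Location` are already in scope):

    pub fn handle_example(
        world: ServerWorld,
        params: req::TextDocumentPositionParams,
    ) -> Result<Vec<Location>> {
        // One conversion replaces the old file-id + line-index + offset dance.
        let position = params.try_conv_with(&world)?;
        let mut res = Vec::new();
        for (file_id, range) in world.analysis().find_all_refs(position)? {
            let line_index = world.analysis().file_line_index(file_id);
            res.push(to_location(file_id, range, &world, &line_index)?);
        }
        Ok(res)
    }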
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index c853ff653..5314a333e 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -6,9 +6,9 @@ use languageserver_types::{ | |||
6 | DiagnosticSeverity, DocumentSymbol, Documentation, FoldingRange, FoldingRangeKind, | 6 | DiagnosticSeverity, DocumentSymbol, Documentation, FoldingRange, FoldingRangeKind, |
7 | FoldingRangeParams, InsertTextFormat, Location, MarkupContent, MarkupKind, Position, | 7 | FoldingRangeParams, InsertTextFormat, Location, MarkupContent, MarkupKind, Position, |
8 | PrepareRenameResponse, RenameParams, SymbolInformation, TextDocumentIdentifier, TextEdit, | 8 | PrepareRenameResponse, RenameParams, SymbolInformation, TextDocumentIdentifier, TextEdit, |
9 | WorkspaceEdit, | 9 | WorkspaceEdit, ParameterInformation, SignatureInformation, |
10 | }; | 10 | }; |
11 | use ra_analysis::{FileId, FoldKind, Query, RunnableKind}; | 11 | use ra_analysis::{FileId, FoldKind, Query, RunnableKind, FilePosition}; |
12 | use ra_syntax::text_utils::contains_offset_nonstrict; | 12 | use ra_syntax::text_utils::contains_offset_nonstrict; |
13 | use rustc_hash::FxHashMap; | 13 | use rustc_hash::FxHashMap; |
14 | use serde_json::to_value; | 14 | use serde_json::to_value; |
@@ -83,10 +83,8 @@ pub fn handle_on_enter( | |||
83 | world: ServerWorld, | 83 | world: ServerWorld, |
84 | params: req::TextDocumentPositionParams, | 84 | params: req::TextDocumentPositionParams, |
85 | ) -> Result<Option<req::SourceChange>> { | 85 | ) -> Result<Option<req::SourceChange>> { |
86 | let file_id = params.text_document.try_conv_with(&world)?; | 86 | let position = params.try_conv_with(&world)?; |
87 | let line_index = world.analysis().file_line_index(file_id); | 87 | match world.analysis().on_enter(position) { |
88 | let offset = params.position.conv_with(&line_index); | ||
89 | match world.analysis().on_enter(file_id, offset) { | ||
90 | None => Ok(None), | 88 | None => Ok(None), |
91 | Some(edit) => Ok(Some(edit.try_conv_with(&world)?)), | 89 | Some(edit) => Ok(Some(edit.try_conv_with(&world)?)), |
92 | } | 90 | } |
@@ -102,8 +100,11 @@ pub fn handle_on_type_formatting( | |||
102 | 100 | ||
103 | let file_id = params.text_document.try_conv_with(&world)?; | 101 | let file_id = params.text_document.try_conv_with(&world)?; |
104 | let line_index = world.analysis().file_line_index(file_id); | 102 | let line_index = world.analysis().file_line_index(file_id); |
105 | let offset = params.position.conv_with(&line_index); | 103 | let position = FilePosition { |
106 | let edits = match world.analysis().on_eq_typed(file_id, offset) { | 104 | file_id, |
105 | offset: params.position.conv_with(&line_index), | ||
106 | }; | ||
107 | let edits = match world.analysis().on_eq_typed(position) { | ||
107 | None => return Ok(None), | 108 | None => return Ok(None), |
108 | Some(mut action) => action.source_file_edits.pop().unwrap().edits, | 109 | Some(mut action) => action.source_file_edits.pop().unwrap().edits, |
109 | }; | 110 | }; |
@@ -201,14 +202,9 @@ pub fn handle_goto_definition( | |||
201 | world: ServerWorld, | 202 | world: ServerWorld, |
202 | params: req::TextDocumentPositionParams, | 203 | params: req::TextDocumentPositionParams, |
203 | ) -> Result<Option<req::GotoDefinitionResponse>> { | 204 | ) -> Result<Option<req::GotoDefinitionResponse>> { |
204 | let file_id = params.text_document.try_conv_with(&world)?; | 205 | let position = params.try_conv_with(&world)?; |
205 | let line_index = world.analysis().file_line_index(file_id); | ||
206 | let offset = params.position.conv_with(&line_index); | ||
207 | let mut res = Vec::new(); | 206 | let mut res = Vec::new(); |
208 | for (file_id, symbol) in world | 207 | for (file_id, symbol) in world.analysis().approximately_resolve_symbol(position)? { |
209 | .analysis() | ||
210 | .approximately_resolve_symbol(file_id, offset)? | ||
211 | { | ||
212 | let line_index = world.analysis().file_line_index(file_id); | 208 | let line_index = world.analysis().file_line_index(file_id); |
213 | let location = to_location(file_id, symbol.node_range, &world, &line_index)?; | 209 | let location = to_location(file_id, symbol.node_range, &world, &line_index)?; |
214 | res.push(location) | 210 | res.push(location) |
@@ -218,11 +214,11 @@ pub fn handle_goto_definition( | |||
218 | 214 | ||
219 | pub fn handle_parent_module( | 215 | pub fn handle_parent_module( |
220 | world: ServerWorld, | 216 | world: ServerWorld, |
221 | params: TextDocumentIdentifier, | 217 | params: req::TextDocumentPositionParams, |
222 | ) -> Result<Vec<Location>> { | 218 | ) -> Result<Vec<Location>> { |
223 | let file_id = params.try_conv_with(&world)?; | 219 | let position = params.try_conv_with(&world)?; |
224 | let mut res = Vec::new(); | 220 | let mut res = Vec::new(); |
225 | for (file_id, symbol) in world.analysis().parent_module(file_id)? { | 221 | for (file_id, symbol) in world.analysis().parent_module(position)? { |
226 | let line_index = world.analysis().file_line_index(file_id); | 222 | let line_index = world.analysis().file_line_index(file_id); |
227 | let location = to_location(file_id, symbol.node_range, &world, &line_index)?; | 223 | let location = to_location(file_id, symbol.node_range, &world, &line_index)?; |
228 | res.push(location); | 224 | res.push(location); |
@@ -379,10 +375,13 @@ pub fn handle_completion( | |||
379 | world: ServerWorld, | 375 | world: ServerWorld, |
380 | params: req::CompletionParams, | 376 | params: req::CompletionParams, |
381 | ) -> Result<Option<req::CompletionResponse>> { | 377 | ) -> Result<Option<req::CompletionResponse>> { |
382 | let file_id = params.text_document.try_conv_with(&world)?; | 378 | let position = { |
383 | let line_index = world.analysis().file_line_index(file_id); | 379 | let file_id = params.text_document.try_conv_with(&world)?; |
384 | let offset = params.position.conv_with(&line_index); | 380 | let line_index = world.analysis().file_line_index(file_id); |
385 | let items = match world.analysis().completions(file_id, offset)? { | 381 | let offset = params.position.conv_with(&line_index); |
382 | FilePosition { file_id, offset } | ||
383 | }; | ||
384 | let items = match world.analysis().completions(position)? { | ||
386 | None => return Ok(None), | 385 | None => return Ok(None), |
387 | Some(items) => items, | 386 | Some(items) => items, |
388 | }; | 387 | }; |
@@ -442,13 +441,9 @@ pub fn handle_signature_help( | |||
442 | world: ServerWorld, | 441 | world: ServerWorld, |
443 | params: req::TextDocumentPositionParams, | 442 | params: req::TextDocumentPositionParams, |
444 | ) -> Result<Option<req::SignatureHelp>> { | 443 | ) -> Result<Option<req::SignatureHelp>> { |
445 | use languageserver_types::{ParameterInformation, SignatureInformation}; | 444 | let position = params.try_conv_with(&world)?; |
446 | 445 | ||
447 | let file_id = params.text_document.try_conv_with(&world)?; | 446 | if let Some((descriptor, active_param)) = world.analysis().resolve_callable(position)? { |
448 | let line_index = world.analysis().file_line_index(file_id); | ||
449 | let offset = params.position.conv_with(&line_index); | ||
450 | |||
451 | if let Some((descriptor, active_param)) = world.analysis().resolve_callable(file_id, offset)? { | ||
452 | let parameters: Vec<ParameterInformation> = descriptor | 447 | let parameters: Vec<ParameterInformation> = descriptor |
453 | .params | 448 | .params |
454 | .iter() | 449 | .iter() |
@@ -487,18 +482,17 @@ pub fn handle_prepare_rename( | |||
487 | world: ServerWorld, | 482 | world: ServerWorld, |
488 | params: req::TextDocumentPositionParams, | 483 | params: req::TextDocumentPositionParams, |
489 | ) -> Result<Option<PrepareRenameResponse>> { | 484 | ) -> Result<Option<PrepareRenameResponse>> { |
490 | let file_id = params.text_document.try_conv_with(&world)?; | 485 | let position = params.try_conv_with(&world)?; |
491 | let line_index = world.analysis().file_line_index(file_id); | ||
492 | let offset = params.position.conv_with(&line_index); | ||
493 | 486 | ||
494 | // We support renaming references like handle_rename does. | 487 | // We support renaming references like handle_rename does. |
495 | // In the future we may want to reject the renaming of things like keywords here too. | 488 | // In the future we may want to reject the renaming of things like keywords here too. |
496 | let refs = world.analysis().find_all_refs(file_id, offset)?; | 489 | let refs = world.analysis().find_all_refs(position)?; |
497 | if refs.is_empty() { | 490 | let r = match refs.first() { |
498 | return Ok(None); | 491 | Some(r) => r, |
499 | } | 492 | None => return Ok(None), |
500 | 493 | }; | |
501 | let r = refs.first().unwrap(); | 494 | let file_id = params.text_document.try_conv_with(&world)?; |
495 | let line_index = world.analysis().file_line_index(file_id); | ||
502 | let loc = to_location(r.0, r.1, &world, &line_index)?; | 496 | let loc = to_location(r.0, r.1, &world, &line_index)?; |
503 | 497 | ||
504 | Ok(Some(PrepareRenameResponse::Range(loc.range))) | 498 | Ok(Some(PrepareRenameResponse::Range(loc.range))) |
@@ -517,7 +511,9 @@ pub fn handle_rename(world: ServerWorld, params: RenameParams) -> Result<Option< | |||
517 | .into()); | 511 | .into()); |
518 | } | 512 | } |
519 | 513 | ||
520 | let refs = world.analysis().find_all_refs(file_id, offset)?; | 514 | let refs = world |
515 | .analysis() | ||
516 | .find_all_refs(FilePosition { file_id, offset })?; | ||
521 | if refs.is_empty() { | 517 | if refs.is_empty() { |
522 | return Ok(None); | 518 | return Ok(None); |
523 | } | 519 | } |
@@ -548,7 +544,9 @@ pub fn handle_references( | |||
548 | let line_index = world.analysis().file_line_index(file_id); | 544 | let line_index = world.analysis().file_line_index(file_id); |
549 | let offset = params.position.conv_with(&line_index); | 545 | let offset = params.position.conv_with(&line_index); |
550 | 546 | ||
551 | let refs = world.analysis().find_all_refs(file_id, offset)?; | 547 | let refs = world |
548 | .analysis() | ||
549 | .find_all_refs(FilePosition { file_id, offset })?; | ||
552 | 550 | ||
553 | Ok(Some( | 551 | Ok(Some( |
554 | refs.into_iter() | 552 | refs.into_iter() |
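
Note on the handler changes above: every handler switches from passing a separate file_id and offset to a single position argument. A minimal sketch of the pattern, assuming FilePosition simply bundles the two fields (the `FilePosition { file_id, offset }` literals in handle_rename and handle_references suggest exactly that; the real definition lives in ra_analysis and may differ):

    // Assumed shape of the bundled position type, inferred from the
    // `FilePosition { file_id, offset }` construction visible in this diff.
    #[derive(Clone, Copy, Debug)]
    pub struct FilePosition {
        pub file_id: FileId,
        pub offset: TextUnit,
    }

    // Handlers now convert the LSP params once and thread one value through:
    //     let position = params.try_conv_with(&world)?;
    //     world.analysis().find_all_refs(position)?;
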
diff --git a/crates/ra_lsp_server/src/main_loop/mod.rs b/crates/ra_lsp_server/src/main_loop/mod.rs index 1eb147539..229d1b0f7 100644 --- a/crates/ra_lsp_server/src/main_loop/mod.rs +++ b/crates/ra_lsp_server/src/main_loop/mod.rs | |||
@@ -376,11 +376,21 @@ impl<'a> PoolDispatcher<'a> { | |||
376 | Ok(lsp_error) => { | 376 | Ok(lsp_error) => { |
377 | RawResponse::err(id, lsp_error.code, lsp_error.message) | 377 | RawResponse::err(id, lsp_error.code, lsp_error.message) |
378 | } | 378 | } |
379 | Err(e) => RawResponse::err( | 379 | Err(e) => { |
380 | id, | 380 | if is_canceled(&e) { |
381 | ErrorCode::InternalError as i32, | 381 | RawResponse::err( |
382 | format!("{}\n{}", e, e.backtrace()), | 382 | id, |
383 | ), | 383 | ErrorCode::RequestCancelled as i32, |
384 | e.to_string(), | ||
385 | ) | ||
386 | } else { | ||
387 | RawResponse::err( | ||
388 | id, | ||
389 | ErrorCode::InternalError as i32, | ||
390 | format!("{}\n{}", e, e.backtrace()), | ||
391 | ) | ||
392 | } | ||
393 | } | ||
384 | }, | 394 | }, |
385 | }; | 395 | }; |
386 | let task = Task::Respond(resp); | 396 | let task = Task::Respond(resp); |
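
The dispatcher now answers a cancelled analysis query with ErrorCode::RequestCancelled instead of InternalError. The is_canceled helper itself is not part of this hunk; a hedged sketch of what such a check could look like, assuming the failure crate's downcasting and an analysis-side Canceled error type (both the helper body and the error's module path are assumptions):

    // Hedged sketch: distinguish cancelled queries from real internal errors.
    // The concrete error type and its path are assumptions, not shown here.
    fn is_canceled(e: &failure::Error) -> bool {
        e.downcast_ref::<ra_analysis::Canceled>().is_some()
    }
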
diff --git a/crates/ra_lsp_server/src/req.rs b/crates/ra_lsp_server/src/req.rs index 9d911912d..fcb7e94e1 100644 --- a/crates/ra_lsp_server/src/req.rs +++ b/crates/ra_lsp_server/src/req.rs | |||
@@ -93,7 +93,7 @@ pub struct Decoration { | |||
93 | pub enum ParentModule {} | 93 | pub enum ParentModule {} |
94 | 94 | ||
95 | impl Request for ParentModule { | 95 | impl Request for ParentModule { |
96 | type Params = TextDocumentIdentifier; | 96 | type Params = TextDocumentPositionParams; |
97 | type Result = Vec<Location>; | 97 | type Result = Vec<Location>; |
98 | const METHOD: &'static str = "m/parentModule"; | 98 | const METHOD: &'static str = "m/parentModule"; |
99 | } | 99 | } |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index 043c9bacd..de4b25e67 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -1,9 +1,11 @@ | |||
1 | [package] | 1 | [package] |
2 | edition = "2018" | 2 | edition = "2015" |
3 | name = "ra_syntax" | 3 | name = "ra_syntax" |
4 | version = "0.1.0" | 4 | version = "0.1.0" |
5 | authors = ["Aleksey Kladov <[email protected]>"] | 5 | authors = ["Aleksey Kladov <[email protected]>"] |
6 | license = "MIT OR Apache-2.0" | 6 | license = "MIT OR Apache-2.0" |
7 | description = "Comment and whitespace preserving parser for the Rust langauge" | ||
(The crate description contains a small typo: "langauge" should read "language".)
8 | repository = "https://github.com/rust-analyzer/rust-analyzer" | ||
7 | 9 | ||
8 | [dependencies] | 10 | [dependencies] |
9 | unicode-xid = "0.1.0" | 11 | unicode-xid = "0.1.0" |
diff --git a/crates/ra_syntax/src/grammar/mod.rs b/crates/ra_syntax/src/grammar/mod.rs index c87564073..95c437983 100644 --- a/crates/ra_syntax/src/grammar/mod.rs +++ b/crates/ra_syntax/src/grammar/mod.rs | |||
@@ -21,6 +21,11 @@ | |||
21 | //! After adding a new inline-test, run `cargo collect-tests` to extract | 21 | //! After adding a new inline-test, run `cargo collect-tests` to extract |
22 | //! it as a standalone text-fixture into `tests/data/parser/inline`, and | 22 | //! it as a standalone text-fixture into `tests/data/parser/inline`, and |
23 | //! run `cargo test` once to create the "gold" value. | 23 | //! run `cargo test` once to create the "gold" value. |
24 | //! | ||
25 | //! Coding convention: rules like `where_clause` always produce either a | ||
26 | //! node or an error, rules like `opt_where_clause` may produce nothing. | ||
27 | //! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`, the | ||
28 | //! caller is responsible for branching on the first token. | ||
24 | mod attributes; | 29 | mod attributes; |
25 | mod expressions; | 30 | mod expressions; |
26 | mod items; | 31 | mod items; |
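
The new convention comment is abstract; here is a tiny illustrative sketch of the two rule shapes, using a hypothetical ret_type rule (the names are for illustration only and are not part of this change):

    // `opt_` rules may produce nothing when the construct is absent...
    pub(super) fn opt_ret_type(p: &mut Parser) {
        if !p.at(THIN_ARROW) {
            return;
        }
        ret_type(p);
    }

    // ...while non-opt rules assert their first token and always emit a node
    // (or an error); callers branch on the first token before calling them.
    fn ret_type(p: &mut Parser) {
        assert!(p.at(THIN_ARROW));
        let m = p.start();
        p.bump();
        types::type_(p);
        m.complete(p, RET_TYPE);
    }

The opt_type_param_list / type_param_list split in the next hunk is a real instance of the same pattern.
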
diff --git a/crates/ra_syntax/src/grammar/type_params.rs b/crates/ra_syntax/src/grammar/type_params.rs index 79f5036b4..68eca0ce8 100644 --- a/crates/ra_syntax/src/grammar/type_params.rs +++ b/crates/ra_syntax/src/grammar/type_params.rs | |||
@@ -4,6 +4,11 @@ pub(super) fn opt_type_param_list(p: &mut Parser) { | |||
4 | if !p.at(L_ANGLE) { | 4 | if !p.at(L_ANGLE) { |
5 | return; | 5 | return; |
6 | } | 6 | } |
7 | type_param_list(p); | ||
8 | } | ||
9 | |||
10 | fn type_param_list(p: &mut Parser) { | ||
11 | assert!(p.at(L_ANGLE)); | ||
7 | let m = p.start(); | 12 | let m = p.start(); |
8 | p.bump(); | 13 | p.bump(); |
9 | 14 | ||
@@ -19,32 +24,32 @@ pub(super) fn opt_type_param_list(p: &mut Parser) { | |||
19 | } | 24 | } |
20 | p.expect(R_ANGLE); | 25 | p.expect(R_ANGLE); |
21 | m.complete(p, TYPE_PARAM_LIST); | 26 | m.complete(p, TYPE_PARAM_LIST); |
27 | } | ||
22 | 28 | ||
23 | fn lifetime_param(p: &mut Parser) { | 29 | fn lifetime_param(p: &mut Parser) { |
24 | assert!(p.at(LIFETIME)); | 30 | assert!(p.at(LIFETIME)); |
25 | let m = p.start(); | 31 | let m = p.start(); |
26 | p.bump(); | 32 | p.bump(); |
27 | if p.at(COLON) { | 33 | if p.at(COLON) { |
28 | lifetime_bounds(p); | 34 | lifetime_bounds(p); |
29 | } | ||
30 | m.complete(p, LIFETIME_PARAM); | ||
31 | } | 35 | } |
36 | m.complete(p, LIFETIME_PARAM); | ||
37 | } | ||
32 | 38 | ||
33 | fn type_param(p: &mut Parser) { | 39 | fn type_param(p: &mut Parser) { |
34 | assert!(p.at(IDENT)); | 40 | assert!(p.at(IDENT)); |
35 | let m = p.start(); | 41 | let m = p.start(); |
36 | name(p); | 42 | name(p); |
37 | if p.at(COLON) { | 43 | if p.at(COLON) { |
38 | bounds(p); | 44 | bounds(p); |
39 | } | ||
40 | // test type_param_default | ||
41 | // struct S<T = i32>; | ||
42 | if p.at(EQ) { | ||
43 | p.bump(); | ||
44 | types::type_(p) | ||
45 | } | ||
46 | m.complete(p, TYPE_PARAM); | ||
47 | } | 45 | } |
46 | // test type_param_default | ||
47 | // struct S<T = i32>; | ||
48 | if p.at(EQ) { | ||
49 | p.bump(); | ||
50 | types::type_(p) | ||
51 | } | ||
52 | m.complete(p, TYPE_PARAM); | ||
48 | } | 53 | } |
49 | 54 | ||
50 | // test type_param_bounds | 55 | // test type_param_bounds |
@@ -99,7 +104,7 @@ pub(super) fn opt_where_clause(p: &mut Parser) { | |||
99 | let m = p.start(); | 104 | let m = p.start(); |
100 | p.bump(); | 105 | p.bump(); |
101 | loop { | 106 | loop { |
102 | if !(paths::is_path_start(p) || p.current() == LIFETIME) { | 107 | if !(paths::is_path_start(p) || p.current() == LIFETIME || p.current() == FOR_KW) { |
103 | break; | 108 | break; |
104 | } | 109 | } |
105 | where_predicate(p); | 110 | where_predicate(p); |
@@ -112,19 +117,30 @@ pub(super) fn opt_where_clause(p: &mut Parser) { | |||
112 | 117 | ||
113 | fn where_predicate(p: &mut Parser) { | 118 | fn where_predicate(p: &mut Parser) { |
114 | let m = p.start(); | 119 | let m = p.start(); |
115 | if p.at(LIFETIME) { | 120 | match p.current() { |
116 | p.eat(LIFETIME); | 121 | LIFETIME => { |
117 | if p.at(COLON) { | 122 | p.bump(); |
118 | lifetime_bounds(p) | 123 | if p.at(COLON) { |
119 | } else { | 124 | lifetime_bounds(p); |
120 | p.error("expected colon") | 125 | } else { |
126 | p.error("expected colon"); | ||
127 | } | ||
121 | } | 128 | } |
122 | } else { | 129 | _ => { |
123 | types::path_type(p); | 130 | // test where_pred_for |
124 | if p.at(COLON) { | 131 | // fn test<F>() |
125 | bounds(p); | 132 | // where |
126 | } else { | 133 | // for<'a> F: Fn(&'a str) |
127 | p.error("expected colon") | 134 | // { } |
135 | if p.at(FOR_KW) { | ||
136 | types::for_binder(p); | ||
137 | } | ||
138 | types::path_type(p); | ||
139 | if p.at(COLON) { | ||
140 | bounds(p); | ||
141 | } else { | ||
142 | p.error("expected colon"); | ||
143 | } | ||
128 | } | 144 | } |
129 | } | 145 | } |
130 | m.complete(p, WHERE_PRED); | 146 | m.complete(p, WHERE_PRED); |
diff --git a/crates/ra_syntax/src/grammar/types.rs b/crates/ra_syntax/src/grammar/types.rs index f308aef89..ed2718e73 100644 --- a/crates/ra_syntax/src/grammar/types.rs +++ b/crates/ra_syntax/src/grammar/types.rs | |||
@@ -188,13 +188,22 @@ fn fn_pointer_type(p: &mut Parser) { | |||
188 | m.complete(p, FN_POINTER_TYPE); | 188 | m.complete(p, FN_POINTER_TYPE); |
189 | } | 189 | } |
190 | 190 | ||
191 | pub(super) fn for_binder(p: &mut Parser) { | ||
192 | assert!(p.at(FOR_KW)); | ||
193 | p.bump(); | ||
194 | if p.at(L_ANGLE) { | ||
195 | type_params::opt_type_param_list(p); | ||
196 | } else { | ||
197 | p.error("expected `<`"); | ||
198 | } | ||
199 | } | ||
200 | |||
191 | // test for_type | 201 | // test for_type |
192 | // type A = for<'a> fn() -> (); | 202 | // type A = for<'a> fn() -> (); |
193 | pub(super) fn for_type(p: &mut Parser) { | 203 | pub(super) fn for_type(p: &mut Parser) { |
194 | assert!(p.at(FOR_KW)); | 204 | assert!(p.at(FOR_KW)); |
195 | let m = p.start(); | 205 | let m = p.start(); |
196 | p.bump(); | 206 | for_binder(p); |
197 | type_params::opt_type_param_list(p); | ||
198 | match p.current() { | 207 | match p.current() { |
199 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | 208 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), |
200 | _ if paths::is_path_start(p) => path_type_(p, false), | 209 | _ if paths::is_path_start(p) => path_type_(p, false), |
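
With this refactor, for_binder parses the `for<...>` prefix in one place, shared by for_type and the new where-clause predicate. The two constructs it now covers, taken from the inline tests in this change:

    // for_type (existing inline test)
    type A = for<'a> fn() -> ();

    // where_pred_for (new inline test)
    fn test<F>()
    where
        for<'a> F: Fn(&'a str)
    { }
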
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 69a679d04..123002825 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -60,6 +60,7 @@ pub use crate::{ | |||
60 | 60 | ||
61 | use crate::yellow::GreenNode; | 61 | use crate::yellow::GreenNode; |
62 | 62 | ||
63 | /// File represents a parse tree for a single Rust file. | ||
63 | #[derive(Clone, Debug, Hash, PartialEq, Eq)] | 64 | #[derive(Clone, Debug, Hash, PartialEq, Eq)] |
64 | pub struct File { | 65 | pub struct File { |
65 | root: SyntaxNode, | 66 | root: SyntaxNode, |
@@ -92,9 +93,11 @@ impl File { | |||
92 | text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert); | 93 | text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert); |
93 | File::parse(&text) | 94 | File::parse(&text) |
94 | } | 95 | } |
96 | /// Typed AST representation of the parse tree. | ||
95 | pub fn ast(&self) -> ast::Root { | 97 | pub fn ast(&self) -> ast::Root { |
96 | ast::Root::cast(self.syntax()).unwrap() | 98 | ast::Root::cast(self.syntax()).unwrap() |
97 | } | 99 | } |
100 | /// Untyped homogeneous representation of the parse tree. | ||
98 | pub fn syntax(&self) -> SyntaxNodeRef { | 101 | pub fn syntax(&self) -> SyntaxNodeRef { |
99 | self.root.borrowed() | 102 | self.root.borrowed() |
100 | } | 103 | } |
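
The new doc comments distinguish the typed and untyped views of a parse tree. A short usage sketch based only on the APIs visible in this hunk (File::parse taking the source text, ast() returning ast::Root, syntax() returning the untyped node); treat it as an assumption-laden example, not documented API:

    use ra_syntax::File;

    fn demo() {
        let file = File::parse("fn main() {}");
        let _typed = file.ast();      // typed ast::Root
        let untyped = file.syntax();  // untyped SyntaxNodeRef for generic traversal
        assert_eq!(untyped.text().to_string(), "fn main() {}");
    }
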
diff --git a/crates/ra_syntax/tests/data/parser/inline/0113_where_pred_for.rs b/crates/ra_syntax/tests/data/parser/inline/0113_where_pred_for.rs new file mode 100644 index 000000000..b448c6178 --- /dev/null +++ b/crates/ra_syntax/tests/data/parser/inline/0113_where_pred_for.rs | |||
@@ -0,0 +1,4 @@ | |||
1 | fn test<F>() | ||
2 | where | ||
3 | for<'a> F: Fn(&'a str) | ||
4 | { } | ||
diff --git a/crates/ra_syntax/tests/data/parser/inline/0113_where_pred_for.txt b/crates/ra_syntax/tests/data/parser/inline/0113_where_pred_for.txt new file mode 100644 index 000000000..08aacc77a --- /dev/null +++ b/crates/ra_syntax/tests/data/parser/inline/0113_where_pred_for.txt | |||
@@ -0,0 +1,58 @@ | |||
1 | ROOT@[0; 49) | ||
2 | FN_DEF@[0; 48) | ||
3 | FN_KW@[0; 2) | ||
4 | WHITESPACE@[2; 3) | ||
5 | NAME@[3; 7) | ||
6 | IDENT@[3; 7) "test" | ||
7 | TYPE_PARAM_LIST@[7; 10) | ||
8 | L_ANGLE@[7; 8) | ||
9 | TYPE_PARAM@[8; 9) | ||
10 | NAME@[8; 9) | ||
11 | IDENT@[8; 9) "F" | ||
12 | R_ANGLE@[9; 10) | ||
13 | PARAM_LIST@[10; 12) | ||
14 | L_PAREN@[10; 11) | ||
15 | R_PAREN@[11; 12) | ||
16 | WHITESPACE@[12; 13) | ||
17 | WHERE_CLAUSE@[13; 44) | ||
18 | WHERE_KW@[13; 18) | ||
19 | WHITESPACE@[18; 22) | ||
20 | WHERE_PRED@[22; 44) | ||
21 | FOR_KW@[22; 25) | ||
22 | TYPE_PARAM_LIST@[25; 29) | ||
23 | L_ANGLE@[25; 26) | ||
24 | LIFETIME_PARAM@[26; 28) | ||
25 | LIFETIME@[26; 28) "'a" | ||
26 | R_ANGLE@[28; 29) | ||
27 | WHITESPACE@[29; 30) | ||
28 | PATH_TYPE@[30; 31) | ||
29 | PATH@[30; 31) | ||
30 | PATH_SEGMENT@[30; 31) | ||
31 | NAME_REF@[30; 31) | ||
32 | IDENT@[30; 31) "F" | ||
33 | COLON@[31; 32) | ||
34 | WHITESPACE@[32; 33) | ||
35 | PATH_TYPE@[33; 44) | ||
36 | PATH@[33; 44) | ||
37 | PATH_SEGMENT@[33; 44) | ||
38 | NAME_REF@[33; 35) | ||
39 | IDENT@[33; 35) "Fn" | ||
40 | PARAM_LIST@[35; 44) | ||
41 | L_PAREN@[35; 36) | ||
42 | PARAM@[36; 43) | ||
43 | REFERENCE_TYPE@[36; 43) | ||
44 | AMP@[36; 37) | ||
45 | LIFETIME@[37; 39) "'a" | ||
46 | WHITESPACE@[39; 40) | ||
47 | PATH_TYPE@[40; 43) | ||
48 | PATH@[40; 43) | ||
49 | PATH_SEGMENT@[40; 43) | ||
50 | NAME_REF@[40; 43) | ||
51 | IDENT@[40; 43) "str" | ||
52 | R_PAREN@[43; 44) | ||
53 | WHITESPACE@[44; 45) | ||
54 | BLOCK@[45; 48) | ||
55 | L_CURLY@[45; 46) | ||
56 | WHITESPACE@[46; 47) | ||
57 | R_CURLY@[47; 48) | ||
58 | WHITESPACE@[48; 49) | ||
diff --git a/editors/code/src/commands/on_enter.ts b/editors/code/src/commands/on_enter.ts index fe6aca63d..64401b684 100644 --- a/editors/code/src/commands/on_enter.ts +++ b/editors/code/src/commands/on_enter.ts | |||
@@ -6,10 +6,6 @@ import { | |||
6 | SourceChange | 6 | SourceChange |
7 | } from './apply_source_change'; | 7 | } from './apply_source_change'; |
8 | 8 | ||
9 | interface OnEnterParams { | ||
10 | textDocument: lc.TextDocumentIdentifier; | ||
11 | position: lc.Position; | ||
12 | } | ||
13 | 9 | ||
14 | export async function handle(event: { text: string }): Promise<boolean> { | 10 | export async function handle(event: { text: string }): Promise<boolean> { |
15 | const editor = vscode.window.activeTextEditor; | 11 | const editor = vscode.window.activeTextEditor; |
@@ -20,7 +16,7 @@ export async function handle(event: { text: string }): Promise<boolean> { | |||
20 | ) { | 16 | ) { |
21 | return false; | 17 | return false; |
22 | } | 18 | } |
23 | const request: OnEnterParams = { | 19 | const request: lc.TextDocumentPositionParams = { |
24 | textDocument: { uri: editor.document.uri.toString() }, | 20 | textDocument: { uri: editor.document.uri.toString() }, |
25 | position: Server.client.code2ProtocolConverter.asPosition( | 21 | position: Server.client.code2ProtocolConverter.asPosition( |
26 | editor.selection.active | 22 | editor.selection.active |
diff --git a/editors/code/src/commands/parent_module.ts b/editors/code/src/commands/parent_module.ts index 4bb92eb96..806c3d34c 100644 --- a/editors/code/src/commands/parent_module.ts +++ b/editors/code/src/commands/parent_module.ts | |||
@@ -1,6 +1,6 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | 2 | ||
3 | import { Location, TextDocumentIdentifier } from 'vscode-languageclient'; | 3 | import * as lc from 'vscode-languageclient'; |
4 | import { Server } from '../server'; | 4 | import { Server } from '../server'; |
5 | 5 | ||
6 | export async function handle() { | 6 | export async function handle() { |
@@ -8,10 +8,13 @@ export async function handle() { | |||
8 | if (editor == null || editor.document.languageId !== 'rust') { | 8 | if (editor == null || editor.document.languageId !== 'rust') { |
9 | return; | 9 | return; |
10 | } | 10 | } |
11 | const request: TextDocumentIdentifier = { | 11 | const request: lc.TextDocumentPositionParams = { |
12 | uri: editor.document.uri.toString() | 12 | textDocument: { uri: editor.document.uri.toString() }, |
13 | position: Server.client.code2ProtocolConverter.asPosition( | ||
14 | editor.selection.active | ||
15 | ) | ||
13 | }; | 16 | }; |
14 | const response = await Server.client.sendRequest<Location[]>( | 17 | const response = await Server.client.sendRequest<lc.Location[]>( |
15 | 'm/parentModule', | 18 | 'm/parentModule', |
16 | request | 19 | request |
17 | ); | 20 | ); |