Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_hir/src/source_binder.rs       | 13
-rw-r--r--  crates/ra_hir_expand/src/lib.rs          | 44
-rw-r--r--  crates/ra_ide_api/src/goto_definition.rs | 48
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs       | 13
4 files changed, 74 insertions, 44 deletions
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 75a467fb3..f0ed8e2b2 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -16,7 +16,7 @@ use ra_syntax::{
     ast::{self, AstNode},
     match_ast, AstPtr,
     SyntaxKind::*,
-    SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+    SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
 };
 
 use crate::{
@@ -131,11 +131,16 @@ pub struct Expansion {
 }
 
 impl Expansion {
-    pub fn translate_offset(&self, db: &impl HirDatabase, offset: TextUnit) -> Option<TextUnit> {
+    pub fn map_token_down(
+        &self,
+        db: &impl HirDatabase,
+        token: Source<&SyntaxToken>,
+    ) -> Option<Source<SyntaxToken>> {
         let exp_info = self.file_id().expansion_info(db)?;
-        exp_info.translate_offset(offset)
+        exp_info.map_token_down(token)
     }
-    pub fn file_id(&self) -> HirFileId {
+
+    fn file_id(&self) -> HirFileId {
         self.macro_call_id.as_file(MacroFileKind::Items)
     }
 }
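
Note: for orientation, a minimal call-site sketch of the new token-based API. The helper name is illustrative, and the `Expansion` is assumed to come from `SourceAnalyzer::expand`, with the token located inside the macro call's token tree; this mirrors the goto_definition change further down rather than adding new API.

// Hedged sketch, not code from this commit: follow a call-site token into the
// expansion produced by the macro call it sits in.
fn token_in_expansion(
    db: &impl HirDatabase,
    expansion: &Expansion,       // from SourceAnalyzer::expand
    token: Source<SyntaxToken>,  // token inside the call's token tree
) -> Option<Source<SyntaxToken>> {
    expansion.map_token_down(db, token.as_ref())
}
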
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 5927a03ba..57e2e6cb1 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -18,8 +18,9 @@ use std::sync::Arc;
 
 use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
+    algo,
     ast::{self, AstNode},
-    SyntaxNode, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -83,13 +84,21 @@ impl HirFileId {
                     loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
 
                 let macro_def = db.macro_def(loc.def)?;
-                let exp_map = db.parse_macro(macro_file)?.1;
+                let (parse, exp_map) = db.parse_macro(macro_file)?;
+                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
                 let arg_start = (loc.ast_id.file_id, arg_start);
                 let def_start = (loc.def.ast_id.file_id, def_start);
 
-                Some(ExpansionInfo { arg_start, def_start, macro_arg, macro_def, exp_map })
+                Some(ExpansionInfo {
+                    expanded,
+                    arg_start,
+                    def_start,
+                    macro_arg,
+                    macro_def,
+                    exp_map,
+                })
             }
         }
     }
@@ -146,27 +155,34 @@ impl MacroCallId {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
 /// ExpansionInfo mainly describes how to map text range between src and expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
-    pub(crate) arg_start: (HirFileId, TextUnit),
-    pub(crate) def_start: (HirFileId, TextUnit),
+    expanded: Source<SyntaxNode>,
+    arg_start: (HirFileId, TextUnit),
+    def_start: (HirFileId, TextUnit),
 
-    pub(crate) macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
-    pub(crate) macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
-    pub(crate) exp_map: Arc<mbe::RevTokenMap>,
+    macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
+    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    exp_map: Arc<mbe::RevTokenMap>,
 }
 
 impl ExpansionInfo {
-    pub fn translate_offset(&self, offset: TextUnit) -> Option<TextUnit> {
-        let offset = offset.checked_sub(self.arg_start.1)?;
-        let token_id = self.macro_arg.1.token_by_offset(offset)?;
+    pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        assert_eq!(token.file_id, self.arg_start.0);
+        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
-        let (r, _) = self.exp_map.ranges.iter().find(|(_, tid)| *tid == token_id)?;
-        Some(r.start())
+        let range = self.exp_map.range_by_token(token_id)?;
+
+        let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?;
+
+        Some(self.expanded.with_ast(token))
     }
 
+    // FIXME: a more correct signature would be
+    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
     pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
         let token_id = look_in_rev_map(&self.exp_map, from)?;
 
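
Note: a caller-side sketch of the reworked `ExpansionInfo::map_token_down` (assumed names, not code from this commit). The input token must come from the file containing the macro call, which is exactly what the `assert_eq!` checks, and the result is re-homed into the macro file, whose syntax tree is now cached in the new `expanded` field.

// Hedged sketch: map a token from the call site into the expansion and read
// its text there. `macro_file` is the HirFileId of the expansion and
// `call_site_token` a token inside the call's token tree.
fn token_text_in_expansion(
    db: &impl db::AstDatabase,
    macro_file: HirFileId,
    call_site_token: Source<SyntaxToken>,
) -> Option<String> {
    let info = macro_file.expansion_info(db)?;
    let mapped = info.map_token_down(call_site_token.as_ref())?;
    debug_assert_eq!(mapped.file_id, macro_file); // the result lives in the expansion
    Some(mapped.ast.text().to_string())
}
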
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs
index 70baa294f..b693a4c31 100644
--- a/crates/ra_ide_api/src/goto_definition.rs
+++ b/crates/ra_ide_api/src/goto_definition.rs
@@ -4,9 +4,8 @@ use std::iter::successors;
 
 use hir::{db::AstDatabase, Source};
 use ra_syntax::{
-    algo::find_node_at_offset,
     ast::{self, DocCommentsOwner},
-    match_ast, AstNode, SyntaxNode, TextUnit,
+    match_ast, AstNode, SyntaxNode, SyntaxToken,
 };
 
 use crate::{
@@ -20,37 +19,42 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let offset = descend_into_macros(db, position);
+    let token = descend_into_macros(db, position)?;
 
-    let syntax = db.parse_or_expand(offset.file_id)?;
+    let res = match_ast! {
+        match (token.ast.parent()) {
+            ast::NameRef(name_ref) => {
+                let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec();
+                RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
+            },
+            ast::Name(name) => {
+                let navs = name_definition(db, token.with_ast(&name))?;
+                RangeInfo::new(name.syntax().text_range(), navs)
 
-    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, offset.ast) {
-        let navs = reference_definition(db, offset.with_ast(&name_ref)).to_vec();
-        return Some(RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()));
-    }
-    if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, offset.ast) {
-        let navs = name_definition(db, offset.with_ast(&name))?;
-        return Some(RangeInfo::new(name.syntax().text_range(), navs));
-    }
-    None
+            },
+            _ => return None,
+        }
+    };
+
+    Some(res)
 }
 
-fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Source<TextUnit> {
-    successors(Some(Source::new(position.file_id.into(), position.offset)), |offset| {
-        let syntax = db.parse_or_expand(offset.file_id)?;
-        let macro_call = find_node_at_offset::<ast::MacroCall>(&syntax, offset.ast)?;
+fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Option<Source<SyntaxToken>> {
+    let file = db.parse_or_expand(position.file_id.into())?;
+    let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
+
+    successors(Some(Source::new(position.file_id.into(), token)), |token| {
+        let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
         let tt = macro_call.token_tree()?;
-        if !tt.syntax().text_range().contains(offset.ast) {
+        if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
             return None;
         }
         let source_analyzer =
-            hir::SourceAnalyzer::new(db, offset.with_ast(macro_call.syntax()), None);
+            hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
         let exp = source_analyzer.expand(db, &macro_call)?;
-        let next_offset = exp.translate_offset(db, offset.ast)?;
-        Some(Source::new(exp.file_id(), next_offset))
+        exp.map_token_down(db, token.as_ref())
     })
     .last()
-    .unwrap()
 }
 
 #[derive(Debug)]
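
Note: the rewritten `descend_into_macros` leans on the `successors` pattern: the closure maps the current token one macro level down and returns `None` once the token no longer sits inside a macro call's token tree (or the call cannot be expanded), and `.last()` keeps the deepest mapping. Because the iterator always yields at least the seed token, `.last()` cannot return `None` here, which is why the old `.unwrap()` could simply be dropped. Below is the control flow in isolation, with the descend step replaced by integer halving purely for illustration.

// Self-contained illustration of the successors(..).last() pattern used above;
// the halving step stands in for "map the token one expansion deeper".
use std::iter::successors;

fn main() {
    let deepest = successors(Some(40u32), |&n| if n > 5 { Some(n / 2) } else { None })
        .last()
        .unwrap(); // safe: the seed is always yielded
    assert_eq!(deepest, 5); // 40 -> 20 -> 10 -> 5, then the closure returns None
}
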
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 37382d2df..8398c9ac7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -77,14 +77,14 @@ pub fn token_tree_to_syntax_node(
 }
 
 impl TokenMap {
-    pub fn token_by_offset(&self, relative_offset: TextUnit) -> Option<tt::TokenId> {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let (idx, _) =
-            self.tokens.iter().enumerate().find(|(_, range)| range.contains(relative_offset))?;
+            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
         Some(tt::TokenId(idx as u32))
     }
 
-    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
-        let idx = tt.0 as usize;
+    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let idx = token_id.0 as usize;
         self.tokens.get(idx).copied()
     }
 
@@ -96,6 +96,11 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
+        Some(r)
+    }
+
     fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
         self.ranges.push((relative_range, token_id.clone()))
     }
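
Note: taken together, `TokenMap::token_by_range` and `RevTokenMap::range_by_token` are the two lookup directions that `ExpansionInfo::map_token_down` composes. A simplified sketch follows; the helper is assumed, and it deliberately omits the `TokenExpander::map_id_down` id translation that sits between the two lookups in the real code.

// Hedged sketch: range in the macro call (relative to its token tree) -> token
// id -> range in the expanded text. The id translation done by the macro
// definition's expander is omitted here for brevity.
fn call_range_to_expansion_range(
    token_map: &TokenMap,   // built from the macro call's token tree (macro_arg.1)
    rev_map: &RevTokenMap,  // built while re-parsing the expansion (exp_map)
    range_in_call: TextRange,
) -> Option<TextRange> {
    let id = token_map.token_by_range(range_in_call)?;
    rev_map.range_by_token(id)
}
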