Diffstat (limited to 'crates')
-rw-r--r--   crates/ra_hir_expand/src/lib.rs    | 51
-rw-r--r--   crates/ra_ide_api/src/expand.rs    | 33
-rw-r--r--   crates/ra_mbe/src/syntax_bridge.rs |  6
3 files changed, 52 insertions, 38 deletions
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 57e2e6cb1..73ec1688c 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -20,7 +20,7 @@ use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
     algo,
     ast::{self, AstNode},
-    SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextUnit,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -79,22 +79,17 @@ impl HirFileId {
             HirFileIdRepr::MacroFile(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
 
-                let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
-                let def_start =
-                    loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
+                let arg_tt = loc.ast_id.to_node(db).token_tree()?;
+                let def_tt = loc.def.ast_id.to_node(db).token_tree()?;
 
                 let macro_def = db.macro_def(loc.def)?;
                 let (parse, exp_map) = db.parse_macro(macro_file)?;
-                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
-                let arg_start = (loc.ast_id.file_id, arg_start);
-                let def_start = (loc.def.ast_id.file_id, def_start);
-
                 Some(ExpansionInfo {
-                    expanded,
-                    arg_start,
-                    def_start,
+                    expanded: Source::new(self, parse.syntax_node()),
+                    arg: Source::new(loc.ast_id.file_id, arg_tt),
+                    def: Source::new(loc.ast_id.file_id, def_tt),
                     macro_arg,
                     macro_def,
                     exp_map,
@@ -159,8 +154,8 @@ impl MacroCallId {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
     expanded: Source<SyntaxNode>,
-    arg_start: (HirFileId, TextUnit),
-    def_start: (HirFileId, TextUnit),
+    arg: Source<ast::TokenTree>,
+    def: Source<ast::TokenTree>,
 
     macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
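For context, the `Source<T>` wrapper used by the new `arg` and `def` fields simply pairs a value with the `HirFileId` it comes from. A minimal sketch, consistent only with how this diff uses it (`Source::new`, the public `file_id`/`ast` fields, `with_ast`); the real definition lives elsewhere in ra_hir_expand and may carry extra derives and helpers:

    #[derive(Debug, Clone, PartialEq, Eq)]
    pub struct Source<T> {
        pub file_id: HirFileId,
        pub ast: T,
    }

    impl<T> Source<T> {
        pub fn new(file_id: HirFileId, ast: T) -> Source<T> {
            Source { file_id, ast }
        }

        /// Keep the file id, swap the payload: e.g. go from a `Source<ast::TokenTree>`
        /// to a `Source<SyntaxToken>` for a token found inside that tree.
        pub fn with_ast<U>(&self, ast: U) -> Source<U> {
            // HirFileId is a small Copy id, so only the payload changes here.
            Source { file_id: self.file_id, ast }
        }
    }

Keeping the whole `Source<ast::TokenTree>` instead of a cached `(HirFileId, TextUnit)` start offset means the offsets below are recomputed from the tree when needed, and the file id can no longer drift apart from the node it describes.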
@@ -169,8 +164,9 @@ pub struct ExpansionInfo {
 
 impl ExpansionInfo {
     pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
-        assert_eq!(token.file_id, self.arg_start.0);
-        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        assert_eq!(token.file_id, self.arg.file_id);
+        let range =
+            token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
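`map_token_down` now reads the offset straight off the stored `arg` token tree, converting the token's absolute range at the call site into a range relative to that tree, which is the coordinate space the call-site `TokenMap` is keyed by. The same arithmetic as a standalone sketch (hypothetical helper; it also assumes `TextRange` in scope, which this commit drops from the imports because the remaining code no longer names the type):

    fn range_relative_to_tree(tt: &ast::TokenTree, token: &SyntaxToken) -> Option<TextRange> {
        // None when the token starts before the token tree, i.e. it is not
        // actually inside the macro call's argument.
        token.text_range().checked_sub(tt.syntax().text_range().start())
    }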
@@ -181,25 +177,22 @@ impl ExpansionInfo {
         Some(self.expanded.with_ast(token))
     }
 
-    // FIXME: a more correct signature would be
-    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
-    pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
-        let token_id = look_in_rev_map(&self.exp_map, from)?;
+    pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        let token_id = self.exp_map.token_by_range(token.ast.text_range())?;
 
         let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
-
-        let (token_map, (file_id, start_offset)) = match origin {
-            mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
-            mbe::Origin::Def => (&self.macro_def.1, self.def_start),
+        let (token_map, tt) = match origin {
+            mbe::Origin::Call => (&self.macro_arg.1, &self.arg),
+            mbe::Origin::Def => (&self.macro_def.1, &self.def),
         };
 
         let range = token_map.relative_range_of(token_id)?;
-
-        return Some((file_id, range + start_offset));
-
-        fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
-            exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
-        }
+        let token = algo::find_covering_element(
+            tt.ast.syntax(),
+            range + tt.ast.syntax().text_range().start(),
+        )
+        .into_token()?;
+        Some(tt.with_ast(token))
     }
 }
 
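`map_token_up` replaces `find_range` and works entirely in token space: look up the expanded token's id in the reverse map, let the macro expander decide via `mbe::Origin` whether that id came from the call site or the definition site, then resolve the relative range inside the matching `Source<ast::TokenTree>` back to a concrete token. The final offset-and-lookup step, sketched as a hypothetical helper (the real code inlines it):

    fn token_in_tree(tt: &ast::TokenTree, relative: TextRange) -> Option<SyntaxToken> {
        // Token-map ranges are relative to the token tree they were built from:
        // shift by the tree's offset in its file, then take the covering token.
        let absolute = relative + tt.syntax().text_range().start();
        algo::find_covering_element(tt.syntax(), absolute).into_token()
    }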
diff --git a/crates/ra_ide_api/src/expand.rs b/crates/ra_ide_api/src/expand.rs
index 5f1fb9a12..7f59e46d2 100644
--- a/crates/ra_ide_api/src/expand.rs
+++ b/crates/ra_ide_api/src/expand.rs
@@ -8,15 +8,32 @@ use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
 use crate::{db::RootDatabase, FileRange};
 
 pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange {
-    let text_range = node.ast.text_range();
-    let (file_id, range) = node
-        .file_id
-        .expansion_info(db)
-        .and_then(|expansion_info| expansion_info.find_range(text_range))
-        .unwrap_or((node.file_id, text_range));
+    let expansion = match node.file_id.expansion_info(db) {
+        None => {
+            return FileRange {
+                file_id: node.file_id.original_file(db),
+                range: node.ast.text_range(),
+            }
+        }
+        Some(it) => it,
+    };
+    // FIXME: the following completely wrong.
+    //
+    // *First*, we should try to map first and last tokens of node, and, if that
+    // fails, return the range of the overall macro expansions.
+    //
+    // *Second*, we should handle recurside macro expansions
+
+    let token = node
+        .ast
+        .descendants_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find_map(|it| expansion.map_token_up(node.with_ast(&it)));
 
-    // FIXME: handle recursive macro generated macro
-    FileRange { file_id: file_id.original_file(db), range }
+    match token {
+        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
+        None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
+    }
 }
 
 pub(crate) fn descend_into_macros(
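The FIXME above already names the intended follow-up. Purely to make it concrete, one possible shape of the first step, not part of this commit, with the name, the imports (`ExpansionInfo`, `TextRange`) and the range join all assumptions: map the node's first and last tokens separately and span the two results, giving up if they resolve into different files.

    fn original_range_by_endpoints(
        db: &RootDatabase,
        node: Source<&SyntaxNode>,
        expansion: &ExpansionInfo,
    ) -> Option<FileRange> {
        let first_token = node.ast.first_token()?;
        let last_token = node.ast.last_token()?;
        let first = expansion.map_token_up(node.with_ast(&first_token))?;
        let last = expansion.map_token_up(node.with_ast(&last_token))?;
        if first.file_id != last.file_id {
            return None;
        }
        // Span from the start of the first mapped token to the end of the last one.
        let range =
            TextRange::from_to(first.ast.text_range().start(), last.ast.text_range().end());
        Some(FileRange { file_id: first.file_id.original_file(db), range })
    }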
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8398c9ac7..fe3b70b8d 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -20,7 +20,7 @@ pub struct TokenMap {
 /// Maps relative range of the expanded syntax node to `tt::TokenId`
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct RevTokenMap {
-    pub ranges: Vec<(TextRange, tt::TokenId)>,
+    ranges: Vec<(TextRange, tt::TokenId)>,
 }
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
@@ -96,6 +96,10 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        self.ranges.iter().find(|&it| it.0 == relative_range).map(|it| it.1)
+    }
+
     pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
         let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
         Some(r)
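`token_by_range` is the exact-range lookup that `ExpansionInfo::map_token_up` now uses in place of the removed `look_in_rev_map` helper, which scanned `ranges` directly (hence the field turning private in the hunk above). How the two maps chain, sketched as a hypothetical helper that skips the `map_id_up` translation the real code performs through the macro definition:

    // RevTokenMap: range in the expanded tree -> tt::TokenId
    // TokenMap:    tt::TokenId                -> range relative to the source token tree
    fn expanded_range_to_call_site(
        rev: &RevTokenMap,
        call_site: &TokenMap,
        range_in_expansion: TextRange,
    ) -> Option<TextRange> {
        let id = rev.token_by_range(range_in_expansion)?;
        call_site.relative_range_of(id)
    }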