author | bors[bot] <26634292+bors[bot]@users.noreply.github.com> | 2019-11-09 09:13:14 +0000 |
---|---|---|
committer | GitHub <[email protected]> | 2019-11-09 09:13:14 +0000 |
commit | 561bb979cecd786f5d311ea7bddb1e15d77a3848 (patch) | |
tree | a6aaa81c3acbfc5e6add5ebbee9abbc0f8357014 /crates/ra_mbe/src | |
parent | 23939cabcc10ecc045a97361df182b9b4db32953 (diff) | |
parent | 0a5ec69404a2556dd82e5bb00b295aebaa291f04 (diff) |
Merge #2169
2169: MBE: Mapping spans for goto definition r=matklad a=edwin0cheng
Currently, goto definition gives the wrong span inside MBE (macro-by-example) expansions. This PR implements a mapping mechanism to fix that; the same mechanism could also be used for a future MBE hygiene implementation.
The basic idea of the mapping (see the sketch after this list) is:
1. When expanding the macro, generate two `TokenMap`s, one for the macro arguments and one for the macro definition, which map between tokens and input text ranges.
2. Before converting the generated `TokenTree` to a `SyntaxNode`, generate an `ExpandedRangeMap` (`RevTokenMap` in this diff), which maps between tokens and output text ranges.
3. Use these three mappings to construct an `ExpansionInfo`, which can map between input text ranges and output text ranges.
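As a rough, self-contained sketch of how these maps compose (not the code from this PR: the `TextRange`/`TokenId` stand-ins, the `ExpansionInfo` shape, and `map_range_up` are simplified assumptions for illustration, and lookups are plain linear scans):

```rust
/// Simplified stand-ins for the real types: `TextRange` here is just a
/// half-open byte range and `TokenId` an index into the argument tokens.
#[derive(Clone, Copy, Debug, PartialEq)]
struct TextRange {
    start: u32,
    end: u32,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct TokenId(u32);

/// Mirrors `TokenMap` from this PR: `TokenId` -> relative range in the
/// *input* (macro call) text.
struct TokenMap {
    tokens: Vec<TextRange>,
}

/// Mirrors `RevTokenMap` from this PR: relative range in the *output*
/// (expanded) text -> `TokenId`.
struct RevTokenMap {
    ranges: Vec<(TextRange, TokenId)>,
}

/// Hypothetical composition of the two maps; the PR builds a similar
/// `ExpansionInfo` from them outside of `ra_mbe`.
struct ExpansionInfo {
    arg_map: TokenMap,
    exp_map: RevTokenMap,
}

impl ExpansionInfo {
    /// Map a range in the expanded text back to a range in the original
    /// macro-call text: output range -> token id -> input range.
    fn map_range_up(&self, expanded: TextRange) -> Option<TextRange> {
        let (_, token_id) = self
            .exp_map
            .ranges
            .iter()
            .find(|(r, _)| r.start <= expanded.start && expanded.end <= r.end)?;
        self.arg_map.tokens.get(token_id.0 as usize).copied()
    }
}

fn main() {
    // Token 0 of the macro argument spans bytes 5..8 of the call site and is
    // emitted at bytes 12..15 of the expansion.
    let info = ExpansionInfo {
        arg_map: TokenMap { tokens: vec![TextRange { start: 5, end: 8 }] },
        exp_map: RevTokenMap { ranges: vec![(TextRange { start: 12, end: 15 }, TokenId(0))] },
    };
    let goto_target = info.map_range_up(TextRange { start: 12, end: 15 });
    assert_eq!(goto_target, Some(TextRange { start: 5, end: 8 }));
}
```

In the diff below, the forward map is `TokenMap` and the reverse map is `RevTokenMap`, which `TtTreeSink` fills in while rendering the expansion back into text.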
Co-authored-by: Edwin Cheng <[email protected]>
Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r-- | crates/ra_mbe/src/lib.rs | 6 |
-rw-r--r-- | crates/ra_mbe/src/syntax_bridge.rs | 83 |
-rw-r--r-- | crates/ra_mbe/src/tests.rs | 16 |
3 files changed, 66 insertions, 39 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 15f000175..70a289f09 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -32,7 +32,7 @@ pub enum ExpandError {
 
 pub use crate::syntax_bridge::{
     ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
-    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
+    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty, RevTokenMap, TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
@@ -118,6 +118,10 @@ impl MacroRules {
         shift_subtree(&mut tt, self.shift);
         mbe_expander::expand(self, &tt)
     }
+
+    pub fn shift(&self) -> u32 {
+        self.shift
+    }
 }
 
 impl Rule {
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 592fcf527..9653f7fef 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -14,12 +14,18 @@ use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
 
 /// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Default)]
+#[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
     tokens: Vec<TextRange>,
 }
 
+/// Maps relative range of the expanded syntax node to `tt::TokenId`
+#[derive(Debug, PartialEq, Eq, Default)]
+pub struct RevTokenMap {
+    pub ranges: Vec<(TextRange, tt::TokenId)>,
+}
+
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
@@ -52,7 +58,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 fn fragment_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
-) -> Result<Parse<SyntaxNode>, ExpandError> {
+) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
     let tmp;
     let tokens = match tt {
         tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
@@ -69,38 +75,33 @@ fn fragment_to_syntax_node(
         return Err(ExpandError::ConversionError);
     }
     //FIXME: would be cool to report errors
-    let parse = tree_sink.inner.finish();
-    Ok(parse)
+    let (parse, range_map) = tree_sink.finish();
+    Ok((parse, range_map))
 }
 
-/// Parses the token tree (result of macro expansion) to an expression
-pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<Parse<ast::Expr>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Expr)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
-}
-
-/// Parses the token tree (result of macro expansion) to a Pattern
-pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<Parse<ast::Pat>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Pattern)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
-}
-
-/// Parses the token tree (result of macro expansion) to a Type
-pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<Parse<ast::TypeRef>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Type)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
-}
-
-/// Parses the token tree (result of macro expansion) as a sequence of stmts
-pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStmts>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Statements)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+macro_rules! impl_token_tree_conversions {
+    ($($(#[$attr:meta])* $name:ident => ($kind:ident, $t:ty) ),*) => {
+        $(
+            $(#[$attr])*
+            pub fn $name(tt: &tt::Subtree) -> Result<(Parse<$t>, RevTokenMap), ExpandError> {
+                let (parse, map) = fragment_to_syntax_node(tt, $kind)?;
+                parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
+            }
+        )*
+    }
 }
 
-/// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Items)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+impl_token_tree_conversions! {
+    /// Parses the token tree (result of macro expansion) to an expression
+    token_tree_to_expr => (Expr, ast::Expr),
+    /// Parses the token tree (result of macro expansion) to a Pattern
+    token_tree_to_pat => (Pattern, ast::Pat),
+    /// Parses the token tree (result of macro expansion) to a Type
+    token_tree_to_ty => (Type, ast::TypeRef),
+    /// Parses the token tree (result of macro expansion) as a sequence of stmts
+    token_tree_to_macro_stmts => (Statements, ast::MacroStmts),
+    /// Parses the token tree (result of macro expansion) as a sequence of items
+    token_tree_to_items => (Items, ast::MacroItems)
 }
 
 impl TokenMap {
@@ -116,6 +117,12 @@ impl TokenMap {
     }
 }
 
+impl RevTokenMap {
+    fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
+        self.ranges.push((relative_range, token_id.clone()))
+    }
+}
+
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -262,6 +269,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
+    range_map: RevTokenMap,
 
     // Number of roots
     // Use for detect ill-form tree which is not single root
@@ -276,8 +284,13 @@ impl<'a> TtTreeSink<'a> {
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
+            range_map: RevTokenMap::default(),
         }
     }
+
+    fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
+        (self.inner.finish(), self.range_map)
+    }
 }
 
 fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
@@ -307,6 +320,15 @@ impl<'a> TreeSink for TtTreeSink<'a> {
 
         match self.cursor.token_tree() {
             Some(tt::TokenTree::Leaf(leaf)) => {
+                // Mark the range if needed
+                if let tt::Leaf::Ident(ident) = leaf {
+                    if kind == IDENT {
+                        let range =
+                            TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
+                        self.range_map.add(range, ident.id);
+                    }
+                }
+
                 self.cursor = self.cursor.bump();
                 self.buf += &format!("{}", leaf);
             }
@@ -337,6 +359,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         {
             if curr.spacing == tt::Spacing::Alone {
                 self.inner.token(WHITESPACE, " ".into());
+                self.text_pos += TextUnit::of_char(' ');
             }
         }
     }
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index a23e3afe3..a848ea334 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -126,7 +126,7 @@ fn test_expr_order() {
         "#,
     );
     let expanded = expand(&rules, "foo! { 1 + 1}");
-    let tree = token_tree_to_items(&expanded).unwrap().tree();
+    let tree = token_tree_to_items(&expanded).unwrap().0.tree();
 
     let dump = format!("{:#?}", tree.syntax());
     assert_eq_text!(
@@ -383,7 +383,7 @@ fn test_expand_to_item_list() {
         ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_items(&expansion).unwrap().tree();
+    let tree = token_tree_to_items(&expansion).unwrap().0.tree();
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
         r#"
@@ -501,7 +501,7 @@ fn test_tt_to_stmts() {
     );
 
     let expanded = expand(&rules, "foo!{}");
-    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();
+    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
 
     assert_eq!(
         format!("{:#?}", stmts.syntax()).trim(),
@@ -946,7 +946,7 @@ fn test_vec() {
     );
 
     let expansion = expand(&rules, r#"vec![1u32,2];"#);
-    let tree = token_tree_to_expr(&expansion).unwrap().tree();
+    let tree = token_tree_to_expr(&expansion).unwrap().0.tree();
 
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
@@ -1436,8 +1436,8 @@ pub(crate) fn assert_expansion(
     };
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_items(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_items(&expanded).unwrap().0.tree();
+            let expected_tree = token_tree_to_items(&expected).unwrap().0.tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -1446,8 +1446,8 @@ pub(crate) fn assert_expansion(
         }
 
         MacroKind::Stmts => {
-            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
+            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().0.tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),