Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_hir_expand/src/db.rs       93
-rw-r--r--  crates/ra_hir_expand/src/lib.rs      35
-rw-r--r--  crates/ra_mbe/src/lib.rs              2
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  113
-rw-r--r--  crates/ra_mbe/src/tests.rs           16
5 files changed, 212 insertions, 47 deletions
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index a4ee9a529..8abfbb4ff 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -8,10 +8,16 @@ use ra_prof::profile;
 use ra_syntax::{AstNode, Parse, SyntaxNode};
 
 use crate::{
-    ast_id_map::AstIdMap, HirFileId, HirFileIdRepr, MacroCallId, MacroCallLoc, MacroDefId,
-    MacroFile, MacroFileKind,
+    ast_id_map::AstIdMap, ExpansionInfo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallLoc,
+    MacroDefId, MacroFile, MacroFileKind,
 };
 
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct ParseMacroWithInfo {
+    pub parsed: Parse<SyntaxNode>,
+    pub expansion_info: Arc<ExpansionInfo>,
+}
+
 // FIXME: rename to ExpandDatabase
 #[salsa::query_group(AstDatabaseStorage)]
 pub trait AstDatabase: SourceDatabase {
@@ -22,10 +28,16 @@ pub trait AstDatabase: SourceDatabase {
 
     #[salsa::interned]
     fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
-    fn macro_arg(&self, id: MacroCallId) -> Option<Arc<tt::Subtree>>;
-    fn macro_def(&self, id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;
+    fn macro_arg(&self, id: MacroCallId) -> Option<(Arc<tt::Subtree>, Arc<mbe::TokenMap>)>;
+    fn macro_def(&self, id: MacroDefId) -> Option<(Arc<mbe::MacroRules>, Arc<mbe::TokenMap>)>;
     fn parse_macro(&self, macro_file: MacroFile) -> Option<Parse<SyntaxNode>>;
-    fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
+    fn parse_macro_with_info(&self, macro_file: MacroFile) -> Option<ParseMacroWithInfo>;
+    fn macro_expand(
+        &self,
+        macro_call: MacroCallId,
+    ) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String>;
+
+    fn macro_expansion_info(&self, macro_file: MacroFile) -> Option<Arc<ExpansionInfo>>;
 }
 
 pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
@@ -34,10 +46,13 @@ pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
     Arc::new(map)
 }
 
-pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
+pub(crate) fn macro_def(
+    db: &dyn AstDatabase,
+    id: MacroDefId,
+) -> Option<(Arc<mbe::MacroRules>, Arc<mbe::TokenMap>)> {
     let macro_call = id.ast_id.to_node(db);
     let arg = macro_call.token_tree()?;
-    let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
+    let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
         log::warn!("fail on macro_def to token tree: {:#?}", arg);
         None
     })?;
@@ -45,32 +60,36 @@ pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
         log::warn!("fail on macro_def parse: {:#?}", tt);
         None
     })?;
-    Some(Arc::new(rules))
+    Some((Arc::new(rules), Arc::new(tmap)))
 }
 
-pub(crate) fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<tt::Subtree>> {
+pub(crate) fn macro_arg(
+    db: &dyn AstDatabase,
+    id: MacroCallId,
+) -> Option<(Arc<tt::Subtree>, Arc<mbe::TokenMap>)> {
     let loc = db.lookup_intern_macro(id);
     let macro_call = loc.ast_id.to_node(db);
     let arg = macro_call.token_tree()?;
-    let (tt, _) = mbe::ast_to_token_tree(&arg)?;
-    Some(Arc::new(tt))
+    let (tt, tmap) = mbe::ast_to_token_tree(&arg)?;
+    Some((Arc::new(tt), Arc::new(tmap)))
 }
 
 pub(crate) fn macro_expand(
     db: &dyn AstDatabase,
     id: MacroCallId,
-) -> Result<Arc<tt::Subtree>, String> {
+) -> Result<(Arc<tt::Subtree>, (Arc<mbe::TokenMap>, Arc<mbe::TokenMap>)), String> {
     let loc = db.lookup_intern_macro(id);
     let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
 
     let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
-    let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
+    let tt = macro_rules.0.expand(&macro_arg.0).map_err(|err| format!("{:?}", err))?;
     // Set a hard limit for the expanded tt
     let count = tt.count();
     if count > 65536 {
         return Err(format!("Total tokens count exceed limit : count = {}", count));
     }
-    Ok(Arc::new(tt))
+
+    Ok((Arc::new(tt), (macro_arg.1.clone(), macro_rules.1.clone())))
 }
 
 pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
@@ -87,6 +106,13 @@ pub(crate) fn parse_macro(
     macro_file: MacroFile,
 ) -> Option<Parse<SyntaxNode>> {
     let _p = profile("parse_macro_query");
+    db.parse_macro_with_info(macro_file).map(|r| r.parsed)
+}
+
+pub(crate) fn parse_macro_with_info(
+    db: &dyn AstDatabase,
+    macro_file: MacroFile,
+) -> Option<ParseMacroWithInfo> {
     let macro_call_id = macro_file.macro_call_id;
     let tt = db
         .macro_expand(macro_call_id)
@@ -97,8 +123,39 @@ pub(crate) fn parse_macro(
             log::warn!("fail on macro_parse: (reason: {})", err,);
         })
         .ok()?;
-    match macro_file.macro_file_kind {
-        MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
-        MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
-    }
+    let res = match macro_file.macro_file_kind {
+        MacroFileKind::Items => {
+            mbe::token_tree_to_items(&tt.0).ok().map(|(p, map)| (Parse::to_syntax(p), map))
+        }
+        MacroFileKind::Expr => {
+            mbe::token_tree_to_expr(&tt.0).ok().map(|(p, map)| (Parse::to_syntax(p), map))
+        }
+    };
+
+    res.map(|(parsed, exp_map)| {
+        let (arg_map, def_map) = tt.1;
+        let loc: MacroCallLoc = db.lookup_intern_macro(macro_call_id);
+
+        let def_start =
+            loc.def.ast_id.to_node(db).token_tree().map(|t| t.syntax().text_range().start());
+        let arg_start =
+            loc.ast_id.to_node(db).token_tree().map(|t| t.syntax().text_range().start());
+
+        let arg_map =
+            arg_start.map(|start| exp_map.ranges(&arg_map, start)).unwrap_or_else(|| Vec::new());
+
+        let def_map =
+            def_start.map(|start| exp_map.ranges(&def_map, start)).unwrap_or_else(|| Vec::new());
+
+        let info = ExpansionInfo { arg_map, def_map };
+
+        ParseMacroWithInfo { parsed, expansion_info: Arc::new(info) }
+    })
+}
+
+pub(crate) fn macro_expansion_info(
+    db: &dyn AstDatabase,
+    macro_file: MacroFile,
+) -> Option<Arc<ExpansionInfo>> {
+    db.parse_macro_with_info(macro_file).map(|res| res.expansion_info.clone())
 }
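
The reworked db.rs queries thread a `mbe::TokenMap` through `macro_arg`, `macro_def`, and `macro_expand`, and `parse_macro_with_info` turns the relative ranges reported for the expansion into an `ExpansionInfo` anchored at the macro argument and macro definition token trees. As a rough illustration of that anchoring step, here is a standalone sketch with simplified `u32` ranges, assuming `exp_map.ranges(&map, start)` shifts the source-side range by the token tree's start offset; `Range`, `to_absolute`, and the sample offsets are illustrative only, not rust-analyzer API:

    // Sketch only (editor's illustration, not part of the diff): how the arg_map /
    // def_map of ExpansionInfo could be anchored at the absolute start of the
    // corresponding token tree, using simplified u32 ranges instead of TextRange.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Range {
        start: u32,
        end: u32,
    }

    impl Range {
        fn shifted(self, offset: u32) -> Range {
            Range { start: self.start + offset, end: self.end + offset }
        }
    }

    // `relative` holds (range in the expanded tree, range relative to the source
    // token tree); `source_start` is where that token tree begins in the file.
    fn to_absolute(relative: &[(Range, Range)], source_start: u32) -> Vec<(Range, Range)> {
        relative.iter().map(|&(exp, src)| (exp, src.shifted(source_start))).collect()
    }

    fn main() {
        // An identifier spanning 0..3 in the expansion and 4..7 inside the macro
        // argument, whose token tree starts at absolute offset 100 in the source.
        let relative = vec![(Range { start: 0, end: 3 }, Range { start: 4, end: 7 })];
        let arg_map = to_absolute(&relative, 100);
        assert_eq!(arg_map, vec![(Range { start: 0, end: 3 }, Range { start: 104, end: 107 })]);
    }
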
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index dd07a16b4..194020b45 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -16,7 +16,7 @@ use std::hash::{Hash, Hasher};
 use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
     ast::{self, AstNode},
-    SyntaxNode,
+    SyntaxNode, TextRange,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -112,6 +112,39 @@ impl MacroCallId {
     }
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+/// ExpansionInfo mainly describes how to map a text range between the source and the expanded macro
+pub struct ExpansionInfo {
+    pub arg_map: Vec<(TextRange, TextRange)>,
+    pub def_map: Vec<(TextRange, TextRange)>,
+}
+
+impl ExpansionInfo {
+    pub fn find_range(
+        &self,
+        from: TextRange,
+        (arg_file_id, def_file_id): (HirFileId, HirFileId),
+    ) -> Option<(HirFileId, TextRange)> {
+        for (src, dest) in &self.arg_map {
+            dbg!((src, *dest, "arg_map"));
+            if src.is_subrange(&from) {
+                dbg!((arg_file_id, *dest));
+                return Some((arg_file_id, *dest));
+            }
+        }
+
+        for (src, dest) in &self.def_map {
+            dbg!((src, *dest, "def_map"));
+            if src.is_subrange(&from) {
+                dbg!((arg_file_id, *dest));
+                return Some((def_file_id, *dest));
+            }
+        }
+
+        None
+    }
+}
+
 /// `AstId` points to an AST node in any file.
 ///
 /// It is stable across reparses, and can be used as salsa key/value.
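
`ExpansionInfo::find_range` above checks `arg_map` first, then `def_map`, and returns the mapped range tagged with the file it belongs to. The following standalone sketch mirrors that lookup over plain tuples; `Side`, `is_subrange`, and the sample ranges are illustrative stand-ins, and the containment test matches the `src.is_subrange(&from)` call in the diff:

    // Sketch only (editor's illustration, not part of the diff): the lookup
    // strategy of find_range, with simplified ranges and an enum in place of the
    // (arg_file_id, def_file_id) pair.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Range {
        start: u32,
        end: u32,
    }

    // True when `inner` lies entirely inside `outer`.
    fn is_subrange(inner: Range, outer: Range) -> bool {
        outer.start <= inner.start && inner.end <= outer.end
    }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Side {
        Arg, // maps back into the macro call's argument
        Def, // maps back into the macro definition
    }

    fn find_range(
        from: Range,
        arg_map: &[(Range, Range)],
        def_map: &[(Range, Range)],
    ) -> Option<(Side, Range)> {
        // Argument entries win over definition entries, as in the diff above.
        for &(src, dest) in arg_map {
            if is_subrange(src, from) {
                return Some((Side::Arg, dest));
            }
        }
        for &(src, dest) in def_map {
            if is_subrange(src, from) {
                return Some((Side::Def, dest));
            }
        }
        None
    }

    fn main() {
        let arg_map = [(Range { start: 5, end: 8 }, Range { start: 104, end: 107 })];
        let def_map = [(Range { start: 0, end: 2 }, Range { start: 40, end: 42 })];
        // A query covering the argument entry resolves to the argument side.
        let hit = find_range(Range { start: 4, end: 9 }, &arg_map, &def_map);
        assert_eq!(hit, Some((Side::Arg, Range { start: 104, end: 107 })));
    }
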
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 15f000175..2926b29fd 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -32,7 +32,7 @@ pub enum ExpandError {
 
 pub use crate::syntax_bridge::{
     ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items,
-    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
+    token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty, TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8d9217518..5db6647e3 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -15,6 +15,7 @@ use crate::ExpandError;
 use std::sync::atomic::{AtomicU32, Ordering};
 
 /// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
     tokens: Vec<TextRange>,
@@ -34,6 +35,13 @@ impl std::default::Default for TokenMap {
     }
 }
 
+/// Maps the relative range of the expanded syntax node to `tt::TokenId`.
+#[derive(Debug, PartialEq, Eq, Default)]
+pub struct ExpandedRangeMap {
+    /// Maps the *relative* expanded range to `tt::TokenId`.
+    ranges: Vec<(TextRange, tt::TokenId)>,
+}
+
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
@@ -66,7 +74,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
 fn fragment_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
-) -> Result<Parse<SyntaxNode>, ExpandError> {
+) -> Result<(Parse<SyntaxNode>, ExpandedRangeMap), ExpandError> {
     let tmp;
     let tokens = match tt {
         tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
@@ -77,44 +85,55 @@ fn fragment_to_syntax_node(
     };
     let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let mut tree_sink = TtTreeSink::new(buffer.begin());
+    let mut range_map = ExpandedRangeMap::default();
+    let mut tree_sink = TtTreeSink::new(buffer.begin(), &mut range_map);
     ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
     }
     //FIXME: would be cool to report errors
     let parse = tree_sink.inner.finish();
-    Ok(parse)
+    Ok((parse, range_map))
 }
 
 /// Parses the token tree (result of macro expansion) to an expression
-pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<Parse<ast::Expr>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Expr)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+pub fn token_tree_to_expr(
+    tt: &tt::Subtree,
+) -> Result<(Parse<ast::Expr>, ExpandedRangeMap), ExpandError> {
+    let (parse, map) = fragment_to_syntax_node(tt, Expr)?;
+    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
 }
 
 /// Parses the token tree (result of macro expansion) to a Pattern
-pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<Parse<ast::Pat>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Pattern)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+pub fn token_tree_to_pat(
+    tt: &tt::Subtree,
+) -> Result<(Parse<ast::Pat>, ExpandedRangeMap), ExpandError> {
+    let (parse, map) = fragment_to_syntax_node(tt, Pattern)?;
+    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
 }
 
 /// Parses the token tree (result of macro expansion) to a Type
-pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<Parse<ast::TypeRef>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Type)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+pub fn token_tree_to_ty(
+    tt: &tt::Subtree,
+) -> Result<(Parse<ast::TypeRef>, ExpandedRangeMap), ExpandError> {
+    let (parse, map) = fragment_to_syntax_node(tt, Type)?;
+    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of stmts
-pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStmts>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Statements)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+pub fn token_tree_to_macro_stmts(
+    tt: &tt::Subtree,
+) -> Result<(Parse<ast::MacroStmts>, ExpandedRangeMap), ExpandError> {
+    let (parse, map) = fragment_to_syntax_node(tt, Statements)?;
+    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
 }
 
 /// Parses the token tree (result of macro expansion) as a sequence of items
-pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
-    let parse = fragment_to_syntax_node(tt, Items)?;
-    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
+pub fn token_tree_to_items(
+    tt: &tt::Subtree,
+) -> Result<(Parse<ast::MacroItems>, ExpandedRangeMap), ExpandError> {
+    let (parse, map) = fragment_to_syntax_node(tt, Items)?;
+    parse.cast().ok_or_else(|| crate::ExpandError::ConversionError).map(|p| (p, map))
 }
 
 impl TokenMap {
@@ -133,6 +152,28 @@ impl TokenMap {
     }
 }
 
+impl ExpandedRangeMap {
+    fn set(&mut self, relative_range: TextRange, token_id: &tt::TokenId) {
+        self.ranges.push((relative_range, token_id.clone()))
+    }
+
+    pub fn ranges(&self, to: &TokenMap) -> Vec<(TextRange, TextRange)> {
+        self.ranges
+            .iter()
+            .filter_map(|(r, tid)| {
+                if to.map_id == tid.map_id() {
+                    return None;
+                }
+                if let Some(to_range) = to.relative_range_of(*tid) {
+                    Some((*r, to_range))
+                } else {
+                    None
+                }
+            })
+            .collect()
+    }
+}
+
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -279,6 +320,8 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
+    range_marker: Option<(TextRange, tt::TokenId)>,
+    range_map: &'a mut ExpandedRangeMap,
 
     // Number of roots
     // Use for detect ill-form tree which is not single root
@@ -286,13 +329,15 @@ struct TtTreeSink<'a> {
 }
 
 impl<'a> TtTreeSink<'a> {
-    fn new(cursor: Cursor<'a>) -> Self {
+    fn new(cursor: Cursor<'a>, range_map: &'a mut ExpandedRangeMap) -> Self {
         TtTreeSink {
             buf: String::new(),
             cursor,
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
+            range_map,
+            range_marker: None,
         }
     }
 }
@@ -317,6 +362,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
             return;
         }
 
+        let mut last_ident = None;
+
         for _ in 0..n_tokens {
             if self.cursor.eof() {
                 break;
@@ -326,6 +373,10 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 Some(tt::TokenTree::Leaf(leaf)) => {
                     self.cursor = self.cursor.bump();
                     self.buf += &format!("{}", leaf);
+
+                    if let tt::Leaf::Ident(ident) = leaf {
+                        last_ident = Some(ident);
+                    }
                 }
                 Some(tt::TokenTree::Subtree(subtree)) => {
                     self.cursor = self.cursor.subtree().unwrap();
@@ -345,6 +396,14 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         self.buf.clear();
         self.inner.token(kind, text);
 
+        // Mark the range if needed
+        if let Some((range, token_id)) = self.range_marker.as_mut() {
+            if let Some(ident) = last_ident {
+                *range = TextRange::offset_len(range.start(), TextUnit::of_str(&ident.text));
+                *token_id = ident.id;
+            }
+        }
+
         // Add whitespace between adjoint puncts
         let next = self.cursor.bump();
         if let (
@@ -354,6 +413,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         {
             if curr.spacing == tt::Spacing::Alone {
                 self.inner.token(WHITESPACE, " ".into());
+                self.text_pos += TextUnit::of_char(' ');
             }
         }
     }
@@ -361,6 +421,15 @@ impl<'a> TreeSink for TtTreeSink<'a> {
     fn start_node(&mut self, kind: SyntaxKind) {
         self.inner.start_node(kind);
 
+        self.range_marker = if kind == IDENT {
+            Some((
+                TextRange::offset_len(self.text_pos, TextUnit::from_usize(0)),
+                tt::TokenId::unspecified(),
+            ))
+        } else {
+            None
+        };
+
         match self.roots.last_mut() {
             None | Some(0) => self.roots.push(1),
             Some(ref mut n) => **n += 1,
@@ -370,6 +439,12 @@ impl<'a> TreeSink for TtTreeSink<'a> {
     fn finish_node(&mut self) {
         self.inner.finish_node();
         *self.roots.last_mut().unwrap() -= 1;
+
+        if let Some(range) = self.range_marker {
+            if range.1 != tt::TokenId::unspecified() {
+                self.range_map.set(range.0, &range.1)
+            }
+        }
     }
 
     fn error(&mut self, error: ParseError) {
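
`ExpandedRangeMap` records, for each identifier node the tree sink emits, the range it occupies in the expanded tree together with the `tt::TokenId` of the identifier that produced it; `ranges` then joins those entries against a `TokenMap` to obtain (expanded range, original relative range) pairs. Since the `TokenMap` internals used here (`map_id`, `relative_range_of`) are only partially visible in this diff, the sketch below approximates the join with a plain `HashMap`; all names are illustrative stand-ins, not rust-analyzer API:

    // Sketch only (editor's illustration, not part of the diff): a toy join between
    // an "expanded range -> token id" table and a "token id -> original relative
    // range" table, the shape of ExpandedRangeMap::ranges above.
    use std::collections::HashMap;

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Range {
        start: u32,
        end: u32,
    }

    type TokenId = u32;

    struct ExpandedRanges {
        // (range in the expanded syntax tree, id of the originating token)
        ranges: Vec<(Range, TokenId)>,
    }

    struct OriginalRanges {
        // token id -> range relative to the original token tree
        tokens: HashMap<TokenId, Range>,
    }

    impl ExpandedRanges {
        fn ranges(&self, to: &OriginalRanges) -> Vec<(Range, Range)> {
            self.ranges
                .iter()
                .filter_map(|&(r, tid)| to.tokens.get(&tid).map(|&orig| (r, orig)))
                .collect()
        }
    }

    fn main() {
        let expanded = ExpandedRanges { ranges: vec![(Range { start: 0, end: 3 }, 7)] };
        let original = OriginalRanges { tokens: HashMap::from([(7, Range { start: 4, end: 7 })]) };
        assert_eq!(
            expanded.ranges(&original),
            vec![(Range { start: 0, end: 3 }, Range { start: 4, end: 7 })]
        );
    }
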
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index a23e3afe3..a848ea334 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -126,7 +126,7 @@ fn test_expr_order() {
 "#,
     );
     let expanded = expand(&rules, "foo! { 1 + 1}");
-    let tree = token_tree_to_items(&expanded).unwrap().tree();
+    let tree = token_tree_to_items(&expanded).unwrap().0.tree();
 
     let dump = format!("{:#?}", tree.syntax());
     assert_eq_text!(
@@ -383,7 +383,7 @@ fn test_expand_to_item_list() {
 ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_items(&expansion).unwrap().tree();
+    let tree = token_tree_to_items(&expansion).unwrap().0.tree();
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
         r#"
@@ -501,7 +501,7 @@ fn test_tt_to_stmts() {
     );
 
     let expanded = expand(&rules, "foo!{}");
-    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();
+    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
 
     assert_eq!(
         format!("{:#?}", stmts.syntax()).trim(),
@@ -946,7 +946,7 @@ fn test_vec() {
     );
 
     let expansion = expand(&rules, r#"vec![1u32,2];"#);
-    let tree = token_tree_to_expr(&expansion).unwrap().tree();
+    let tree = token_tree_to_expr(&expansion).unwrap().0.tree();
 
     assert_eq!(
         format!("{:#?}", tree.syntax()).trim(),
@@ -1436,8 +1436,8 @@ pub(crate) fn assert_expansion(
     };
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_items(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_items(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_items(&expanded).unwrap().0.tree();
+            let expected_tree = token_tree_to_items(&expected).unwrap().0.tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -1446,8 +1446,8 @@ pub(crate) fn assert_expansion(
         }
 
         MacroKind::Stmts => {
-            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
-            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();
+            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().0.tree();
+            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().0.tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),