author     Aleksey Kladov <[email protected]>   2019-11-18 13:08:41 +0000
committer  Aleksey Kladov <[email protected]>   2019-11-18 13:08:41 +0000
commit     2b6f1ab0e2303dce8e6e424514fd2a9e74566c89 (patch)
tree       c4eb2e0d8dce47d503d1ed063a6b3e0fadba1c1f
parent     789a0d2a6474f924a0c3239a085b5e8579b1e7f6 (diff)
Collapse TokenMap and RevTokenMap
-rw-r--r--   crates/ra_hir_expand/src/db.rs      |   8
-rw-r--r--   crates/ra_hir_expand/src/lib.rs     |   4
-rw-r--r--   crates/ra_mbe/src/lib.rs            |   2
-rw-r--r--   crates/ra_mbe/src/syntax_bridge.rs  | 210
4 files changed, 105 insertions, 119 deletions
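
Before the per-file diffs, a quick summary of the data-structure change may help. The old TokenMap mapped a tt::TokenId to a source range implicitly: the id was simply the index into its tokens: Vec<TextRange>. The old RevTokenMap stored explicit (TextRange, tt::TokenId) pairs for the reverse, expansion-side lookup. This commit collapses the two into a single TokenMap that stores explicit (tt::TokenId, TextRange) entries and answers queries in both directions. Below is a minimal, self-contained sketch of the collapsed structure; TokenId and TextRange here are stand-ins for the real tt::TokenId and TextRange types, not the rust-analyzer API itself:

    // Sketch only: stand-in types for tt::TokenId and TextRange.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct TokenId(u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct TextRange { start: u32, end: u32 }

    // One map, usable from both sides; replaces the TokenMap/RevTokenMap pair.
    #[derive(Debug, Default, PartialEq, Eq)]
    struct TokenMap {
        entries: Vec<(TokenId, TextRange)>,
    }

    impl TokenMap {
        // Source-side query: which token id covers this relative range?
        fn token_by_range(&self, range: TextRange) -> Option<TokenId> {
            self.entries.iter().find(|&&(_, r)| r == range).map(|&(id, _)| id)
        }

        // Expansion-side query: which range does this token id map to?
        fn range_by_token(&self, id: TokenId) -> Option<TextRange> {
            self.entries.iter().find(|&&(i, _)| i == id).map(|&(_, r)| r)
        }

        fn insert(&mut self, id: TokenId, range: TextRange) {
            self.entries.push((id, range));
        }
    }

    fn main() {
        let mut map = TokenMap::default();
        map.insert(TokenId(0), TextRange { start: 4, end: 9 });
        // Both lookup directions now go through the same structure.
        assert_eq!(map.token_by_range(TextRange { start: 4, end: 9 }), Some(TokenId(0)));
        assert_eq!(map.range_by_token(TokenId(0)), Some(TextRange { start: 4, end: 9 }));
    }

Both lookups are linear scans over the entries, matching the find-based implementation in the diff below.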
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index 9de7c1ea8..3c11c8a22 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -59,10 +59,8 @@ pub trait AstDatabase: SourceDatabase {
     fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
     fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
     fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
-    fn parse_macro(
-        &self,
-        macro_file: MacroFile,
-    ) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)>;
+    fn parse_macro(&self, macro_file: MacroFile)
+        -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
     fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
 }
 
@@ -136,7 +134,7 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
 pub(crate) fn parse_macro(
     db: &dyn AstDatabase,
     macro_file: MacroFile,
-) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)> {
+) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
     let _p = profile("parse_macro_query");
 
     let macro_call_id = macro_file.macro_call_id;
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 73ec1688c..cfe7e6d15 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -159,7 +159,7 @@ pub struct ExpansionInfo {
 
     macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
-    exp_map: Arc<mbe::RevTokenMap>,
+    exp_map: Arc<mbe::TokenMap>,
 }
 
 impl ExpansionInfo {
@@ -186,7 +186,7 @@ impl ExpansionInfo {
             mbe::Origin::Def => (&self.macro_def.1, &self.def),
         };
 
-        let range = token_map.relative_range_of(token_id)?;
+        let range = token_map.range_by_token(token_id)?;
         let token = algo::find_covering_element(
             tt.ast.syntax(),
             range + tt.ast.syntax().text_range().start(),
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 58ca95368..bbddebe67 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -31,7 +31,7 @@ pub enum ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index fe3b70b8d..d1c49c0b3 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -14,30 +14,22 @@ use crate::ExpandError;
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
-    tokens: Vec<TextRange>,
-}
-
-/// Maps relative range of the expanded syntax node to `tt::TokenId`
-#[derive(Debug, PartialEq, Eq, Default)]
-pub struct RevTokenMap {
-    ranges: Vec<(TextRange, tt::TokenId)>,
+    entries: Vec<(tt::TokenId, TextRange)>,
 }
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
-    let mut token_map = TokenMap::default();
-    let node = ast.syntax();
-    let tt = convert_tt(&mut token_map, node.text_range().start(), node)?;
-    Some((tt, token_map))
+    syntax_node_to_token_tree(ast.syntax())
 }
 
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
-    let mut token_map = TokenMap::default();
-    let tt = convert_tt(&mut token_map, node.text_range().start(), node)?;
-    Some((tt, token_map))
+    let global_offset = node.text_range().start();
+    let mut c = Convertor { map: TokenMap::default(), global_offset, next_id: 0 };
+    let subtree = c.go(node)?;
+    Some((subtree, c.map))
 }
 
 // The following items are what `rustc` macro can be parsed into :
@@ -55,7 +47,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
-) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
+) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
     let tmp;
     let tokens = match tt {
         tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
@@ -78,35 +70,17 @@ pub fn token_tree_to_syntax_node(
 
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let (idx, _) =
-            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
-        Some(tt::TokenId(idx as u32))
-    }
-
-    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
-        let idx = token_id.0 as usize;
-        self.tokens.get(idx).copied()
-    }
-
-    fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
-        let id = self.tokens.len();
-        self.tokens.push(relative_range);
-        tt::TokenId(id as u32)
-    }
-}
-
-impl RevTokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        self.ranges.iter().find(|&it| it.0 == relative_range).map(|it| it.1)
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
+        Some(token_id)
     }
 
     pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
-        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
-        Some(r)
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        Some(range)
     }
 
-    fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
-        self.ranges.push((relative_range, token_id.clone()))
+    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, relative_range));
     }
 }
 
@@ -171,84 +145,98 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
     }
 }
 
-fn convert_tt(
-    token_map: &mut TokenMap,
-    global_offset: TextUnit,
-    tt: &SyntaxNode,
-) -> Option<tt::Subtree> {
-    // This tree is empty
-    if tt.first_child_or_token().is_none() {
-        return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
-    }
-
-    let first_child = tt.first_child_or_token()?;
-    let last_child = tt.last_child_or_token()?;
-    let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
-        (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
-        (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
-        (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
-        _ => (tt::Delimiter::None, false),
-    };
-
-    let mut token_trees = Vec::new();
-    let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
-
-    while let Some(child) = child_iter.next() {
-        if skip_first && (child == first_child || child == last_child) {
-            continue;
-        }
-
-        match child {
-            NodeOrToken::Token(token) => {
-                if let Some(doc_tokens) = convert_doc_comment(&token) {
-                    token_trees.extend(doc_tokens);
-                } else if token.kind().is_trivia() {
-                    continue;
-                } else if token.kind().is_punct() {
-                    assert!(token.text().len() == 1, "Input ast::token punct must be single char.");
-                    let char = token.text().chars().next().unwrap();
-
-                    let spacing = match child_iter.peek() {
-                        Some(NodeOrToken::Token(token)) => {
-                            if token.kind().is_punct() {
-                                tt::Spacing::Joint
-                            } else {
-                                tt::Spacing::Alone
-                            }
-                        }
-                        _ => tt::Spacing::Alone,
-                    };
-
-                    token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
-                } else {
-                    let child: tt::TokenTree =
-                        if token.kind() == T![true] || token.kind() == T![false] {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                        } else if token.kind().is_keyword()
-                            || token.kind() == IDENT
-                            || token.kind() == LIFETIME
-                        {
-                            let relative_range = token.text_range() - global_offset;
-                            let id = token_map.alloc(relative_range);
-                            let text = token.text().clone();
-                            tt::Leaf::from(tt::Ident { text, id }).into()
-                        } else if token.kind().is_literal() {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                        } else {
-                            return None;
-                        };
-                    token_trees.push(child);
-                }
-            }
-            NodeOrToken::Node(node) => {
-                let child = convert_tt(token_map, global_offset, &node)?.into();
-                token_trees.push(child);
-            }
-        };
-    }
-
-    let res = tt::Subtree { delimiter, token_trees };
-    Some(res)
+struct Convertor {
+    map: TokenMap,
+    global_offset: TextUnit,
+    next_id: u32,
+}
+
+impl Convertor {
+    fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
+        // This tree is empty
+        if tt.first_child_or_token().is_none() {
+            return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
+        }
+
+        let first_child = tt.first_child_or_token()?;
+        let last_child = tt.last_child_or_token()?;
+        let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
+            (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
+            (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
+            (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
+            _ => (tt::Delimiter::None, false),
+        };
+
+        let mut token_trees = Vec::new();
+        let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
+
+        while let Some(child) = child_iter.next() {
+            if skip_first && (child == first_child || child == last_child) {
+                continue;
+            }
+
+            match child {
+                NodeOrToken::Token(token) => {
+                    if let Some(doc_tokens) = convert_doc_comment(&token) {
+                        token_trees.extend(doc_tokens);
+                    } else if token.kind().is_trivia() {
+                        continue;
+                    } else if token.kind().is_punct() {
+                        assert!(
+                            token.text().len() == 1,
+                            "Input ast::token punct must be single char."
+                        );
+                        let char = token.text().chars().next().unwrap();
+
+                        let spacing = match child_iter.peek() {
+                            Some(NodeOrToken::Token(token)) => {
+                                if token.kind().is_punct() {
+                                    tt::Spacing::Joint
+                                } else {
+                                    tt::Spacing::Alone
+                                }
+                            }
+                            _ => tt::Spacing::Alone,
+                        };
+
+                        token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                    } else {
+                        let child: tt::TokenTree =
+                            if token.kind() == T![true] || token.kind() == T![false] {
+                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                            } else if token.kind().is_keyword()
+                                || token.kind() == IDENT
+                                || token.kind() == LIFETIME
+                            {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Leaf::from(tt::Ident { text, id }).into()
+                            } else if token.kind().is_literal() {
+                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                            } else {
+                                return None;
+                            };
+                        token_trees.push(child);
+                    }
+                }
+                NodeOrToken::Node(node) => {
+                    let child = self.go(&node)?.into();
+                    token_trees.push(child);
+                }
+            };
+        }
+
+        let res = tt::Subtree { delimiter, token_trees };
+        Some(res)
+    }
+
+    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+        let relative_range = absolute_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert(token_id, relative_range);
+        token_id
+    }
 }
 
 struct TtTreeSink<'a> {
@@ -256,7 +244,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
-    range_map: RevTokenMap,
+    token_map: TokenMap,
 
     // Number of roots
     // Use for detect ill-form tree which is not single root
@@ -271,12 +259,12 @@ impl<'a> TtTreeSink<'a> {
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
-            range_map: RevTokenMap::default(),
+            token_map: TokenMap::default(),
         }
     }
 
-    fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
-        (self.inner.finish(), self.range_map)
+    fn finish(self) -> (Parse<SyntaxNode>, TokenMap) {
+        (self.inner.finish(), self.token_map)
     }
 }
 
@@ -312,7 +300,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 if kind == IDENT {
                     let range =
                         TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
-                    self.range_map.add(range, ident.id);
+                    self.token_map.insert(ident.id, range);
                 }
            }
 
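
A final note on where ids come from. In the old code an id was born as tt::TokenId(self.tokens.len()), so allocation and storage were fused into one vector. After the collapse, the Convertor introduced in syntax_bridge.rs carries an explicit next_id counter and records each (id, relative range) pair through TokenMap::insert; TtTreeSink then calls the very same insert to record where each identifier lands in the expanded text, which is what lets a single type serve both sides. A small self-contained sketch of the allocation step, using primitive stand-ins rather than the real tt and TextRange types:

    // Sketch of Convertor::alloc from the diff above, with stand-in types.
    struct Convertor {
        map: Vec<(u32, (u32, u32))>, // (token id, (relative start, relative end))
        global_offset: u32,          // start offset of the node being converted
        next_id: u32,                // explicit id counter, replacing "id == index"
    }

    impl Convertor {
        fn alloc(&mut self, absolute_range: (u32, u32)) -> u32 {
            // Store ranges relative to the converted node, mirroring the real
            // code's `absolute_range - self.global_offset`.
            let relative = (
                absolute_range.0 - self.global_offset,
                absolute_range.1 - self.global_offset,
            );
            let token_id = self.next_id;
            self.next_id += 1;
            self.map.push((token_id, relative));
            token_id
        }
    }

    fn main() {
        let mut c = Convertor { map: Vec::new(), global_offset: 10, next_id: 0 };
        // An identifier spanning absolute offsets 14..17 gets id 0, stored as 4..7.
        assert_eq!(c.alloc((14, 17)), 0);
        assert_eq!(c.map[0], (0, (4, 7)));
    }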