diff options
author | bors[bot] <26634292+bors[bot]@users.noreply.github.com> | 2021-05-24 20:52:01 +0100 |
---|---|---|
committer | GitHub <[email protected]> | 2021-05-24 20:52:01 +0100 |
commit | f5f24a9a2c2e9d5d5ff155e700b2bf647f926d47 (patch) | |
tree | 273c8086766a31f0ff5df2a89cbe670828e887db /crates | |
parent | 1ebb53e5db32e32c00e4ce0ceba4277b53967056 (diff) | |
parent | c8f40b1503cb461b935f5fb0a44fa8e26976c363 (diff) |
Merge #8977
8977: internal: minor `TokenMap` cleanups r=jonas-schievink a=jonas-schievink
bors r+
Co-authored-by: Jonas Schievink <[email protected]>
Diffstat (limited to 'crates')
-rw-r--r-- | crates/hir_expand/src/db.rs | 2 | ||||
-rw-r--r-- | crates/hir_expand/src/hygiene.rs | 2 | ||||
-rw-r--r-- | crates/hir_expand/src/lib.rs | 4 | ||||
-rw-r--r-- | crates/mbe/src/lib.rs | 10 | ||||
-rw-r--r-- | crates/mbe/src/syntax_bridge.rs | 83 | ||||
-rw-r--r-- | crates/mbe/src/tests/expand.rs | 5 | ||||
-rw-r--r-- | crates/mbe/src/token_map.rs | 85 |
7 files changed, 101 insertions, 90 deletions
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index db9d19b0c..625c26f0a 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -155,7 +155,7 @@ pub fn expand_speculative( | |||
155 | mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?; | 155 | mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?; |
156 | 156 | ||
157 | let token_id = macro_def.map_id_down(token_id); | 157 | let token_id = macro_def.map_id_down(token_id); |
158 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; | 158 | let range = tmap_2.range_by_token(token_id, token_to_map.kind())?; |
159 | let token = node.syntax_node().covering_element(range).into_token()?; | 159 | let token = node.syntax_node().covering_element(range).into_token()?; |
160 | Some((node.syntax_node(), token)) | 160 | Some((node.syntax_node(), token)) |
161 | } | 161 | } |
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index 38e09fdd4..d98913907 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs | |||
@@ -154,7 +154,7 @@ impl HygieneInfo { | |||
154 | }, | 154 | }, |
155 | }; | 155 | }; |
156 | 156 | ||
157 | let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; | 157 | let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?; |
158 | Some((tt.with_value(range + tt.value), origin)) | 158 | Some((tt.with_value(range + tt.value), origin)) |
159 | } | 159 | } |
160 | } | 160 | } |
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 92c679dd2..6be4516a3 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -329,7 +329,7 @@ impl ExpansionInfo { | |||
329 | let token_id = self.macro_arg.1.token_by_range(range)?; | 329 | let token_id = self.macro_arg.1.token_by_range(range)?; |
330 | let token_id = self.macro_def.map_id_down(token_id); | 330 | let token_id = self.macro_def.map_id_down(token_id); |
331 | 331 | ||
332 | let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 332 | let range = self.exp_map.range_by_token(token_id, token.value.kind())?; |
333 | 333 | ||
334 | let token = self.expanded.value.covering_element(range).into_token()?; | 334 | let token = self.expanded.value.covering_element(range).into_token()?; |
335 | 335 | ||
@@ -354,7 +354,7 @@ impl ExpansionInfo { | |||
354 | }, | 354 | }, |
355 | }; | 355 | }; |
356 | 356 | ||
357 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 357 | let range = token_map.range_by_token(token_id, token.value.kind())?; |
358 | let token = | 358 | let token = |
359 | tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; | 359 | tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; |
360 | Some((tt.with_value(token), origin)) | 360 | Some((tt.with_value(token), origin)) |
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 3af5bc18b..b95374b76 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -14,6 +14,7 @@ mod tests; | |||
14 | 14 | ||
15 | #[cfg(test)] | 15 | #[cfg(test)] |
16 | mod benchmark; | 16 | mod benchmark; |
17 | mod token_map; | ||
17 | 18 | ||
18 | use std::fmt; | 19 | use std::fmt; |
19 | 20 | ||
@@ -63,9 +64,12 @@ impl fmt::Display for ExpandError { | |||
63 | } | 64 | } |
64 | } | 65 | } |
65 | 66 | ||
66 | pub use crate::syntax_bridge::{ | 67 | pub use crate::{ |
67 | ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, | 68 | syntax_bridge::{ |
68 | token_tree_to_syntax_node, TokenMap, | 69 | ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, |
70 | token_tree_to_syntax_node, | ||
71 | }, | ||
72 | token_map::TokenMap, | ||
69 | }; | 73 | }; |
70 | 74 | ||
71 | /// This struct contains AST for a single `macro_rules` definition. What might | 75 | /// This struct contains AST for a single `macro_rules` definition. What might |
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index b13168bd3..b11172caf 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs | |||
@@ -10,36 +10,8 @@ use syntax::{ | |||
10 | }; | 10 | }; |
11 | use tt::buffer::{Cursor, TokenBuffer}; | 11 | use tt::buffer::{Cursor, TokenBuffer}; |
12 | 12 | ||
13 | use crate::ExpandError; | ||
14 | use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter}; | 13 | use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter}; |
15 | 14 | use crate::{ExpandError, TokenMap}; | |
16 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | ||
17 | pub enum TokenTextRange { | ||
18 | Token(TextRange), | ||
19 | Delimiter(TextRange), | ||
20 | } | ||
21 | |||
22 | impl TokenTextRange { | ||
23 | pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { | ||
24 | match self { | ||
25 | TokenTextRange::Token(it) => Some(it), | ||
26 | TokenTextRange::Delimiter(it) => match kind { | ||
27 | T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), | ||
28 | T!['}'] | T![')'] | T![']'] => { | ||
29 | Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) | ||
30 | } | ||
31 | _ => None, | ||
32 | }, | ||
33 | } | ||
34 | } | ||
35 | } | ||
36 | |||
37 | /// Maps `tt::TokenId` to the relative range of the original token. | ||
38 | #[derive(Debug, PartialEq, Eq, Clone, Default)] | ||
39 | pub struct TokenMap { | ||
40 | /// Maps `tt::TokenId` to the *relative* source range. | ||
41 | entries: Vec<(tt::TokenId, TokenTextRange)>, | ||
42 | } | ||
43 | 15 | ||
44 | /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro | 16 | /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro |
45 | /// will consume). | 17 | /// will consume). |
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { | |||
53 | let global_offset = node.text_range().start(); | 25 | let global_offset = node.text_range().start(); |
54 | let mut c = Convertor::new(node, global_offset); | 26 | let mut c = Convertor::new(node, global_offset); |
55 | let subtree = c.go(); | 27 | let subtree = c.go(); |
56 | c.id_alloc.map.entries.shrink_to_fit(); | 28 | c.id_alloc.map.shrink_to_fit(); |
57 | (subtree, c.id_alloc.map) | 29 | (subtree, c.id_alloc.map) |
58 | } | 30 | } |
59 | 31 | ||
@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> { | |||
149 | res | 121 | res |
150 | } | 122 | } |
151 | 123 | ||
152 | impl TokenMap { | ||
153 | pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { | ||
154 | let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { | ||
155 | TokenTextRange::Token(it) => *it == relative_range, | ||
156 | TokenTextRange::Delimiter(it) => { | ||
157 | let open = TextRange::at(it.start(), 1.into()); | ||
158 | let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); | ||
159 | open == relative_range || close == relative_range | ||
160 | } | ||
161 | })?; | ||
162 | Some(token_id) | ||
163 | } | ||
164 | |||
165 | pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> { | ||
166 | let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; | ||
167 | Some(range) | ||
168 | } | ||
169 | |||
170 | fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { | ||
171 | self.entries.push((token_id, TokenTextRange::Token(relative_range))); | ||
172 | } | ||
173 | |||
174 | fn insert_delim( | ||
175 | &mut self, | ||
176 | token_id: tt::TokenId, | ||
177 | open_relative_range: TextRange, | ||
178 | close_relative_range: TextRange, | ||
179 | ) -> usize { | ||
180 | let res = self.entries.len(); | ||
181 | let cover = open_relative_range.cover(close_relative_range); | ||
182 | |||
183 | self.entries.push((token_id, TokenTextRange::Delimiter(cover))); | ||
184 | res | ||
185 | } | ||
186 | |||
187 | fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { | ||
188 | let (_, token_text_range) = &mut self.entries[idx]; | ||
189 | if let TokenTextRange::Delimiter(dim) = token_text_range { | ||
190 | let cover = dim.cover(close_relative_range); | ||
191 | *token_text_range = TokenTextRange::Delimiter(cover); | ||
192 | } | ||
193 | } | ||
194 | |||
195 | fn remove_delim(&mut self, idx: usize) { | ||
196 | // FIXME: This could be accidentally quadratic | ||
197 | self.entries.remove(idx); | ||
198 | } | ||
199 | } | ||
200 | |||
201 | /// Returns the textual content of a doc comment block as a quoted string | 124 | /// Returns the textual content of a doc comment block as a quoted string |
202 | /// That is, strips leading `///` (or `/**`, etc) | 125 | /// That is, strips leading `///` (or `/**`, etc) |
203 | /// and strips the ending `*/` | 126 | /// and strips the ending `*/` |
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> { | |||
634 | } | 557 | } |
635 | 558 | ||
636 | fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) { | 559 | fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) { |
637 | self.token_map.entries.shrink_to_fit(); | 560 | self.token_map.shrink_to_fit(); |
638 | (self.inner.finish(), self.token_map) | 561 | (self.inner.finish(), self.token_map) |
639 | } | 562 | } |
640 | } | 563 | } |
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs index 3a1d840ea..5f173f513 100644 --- a/crates/mbe/src/tests/expand.rs +++ b/crates/mbe/src/tests/expand.rs | |||
@@ -58,9 +58,8 @@ macro_rules! foobar { | |||
58 | let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); | 58 | let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); |
59 | let content = node.syntax_node().to_string(); | 59 | let content = node.syntax_node().to_string(); |
60 | 60 | ||
61 | let get_text = |id, kind| -> String { | 61 | let get_text = |
62 | content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string() | 62 | |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() }; |
63 | }; | ||
64 | 63 | ||
65 | assert_eq!(expanded.token_trees.len(), 4); | 64 | assert_eq!(expanded.token_trees.len(), 4); |
66 | // {($e:ident) => { fn $e() {} }} | 65 | // {($e:ident) => { fn $e() {} }} |
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs new file mode 100644 index 000000000..6df3de3b3 --- /dev/null +++ b/crates/mbe/src/token_map.rs | |||
@@ -0,0 +1,85 @@ | |||
1 | //! Mapping between `TokenId`s and the token's position in macro definitions or inputs. | ||
2 | |||
3 | use parser::{SyntaxKind, T}; | ||
4 | use syntax::{TextRange, TextSize}; | ||
5 | |||
/// Where a `TokenId` points in the original text: either a single ordinary
/// token, or a delimiter pair recorded as one combined span.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum TokenTextRange {
    /// Relative range of a single ordinary token.
    Token(TextRange),
    /// Combined relative range of a delimiter pair, from the start of the
    /// opening delimiter to the end of the closing one (see `insert_delim`,
    /// which stores `open.cover(close)`).
    Delimiter(TextRange),
}
11 | |||
12 | impl TokenTextRange { | ||
13 | fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { | ||
14 | match self { | ||
15 | TokenTextRange::Token(it) => Some(it), | ||
16 | TokenTextRange::Delimiter(it) => match kind { | ||
17 | T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), | ||
18 | T!['}'] | T![')'] | T![']'] => { | ||
19 | Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) | ||
20 | } | ||
21 | _ => None, | ||
22 | }, | ||
23 | } | ||
24 | } | ||
25 | } | ||
26 | |||
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct TokenMap {
    /// Maps `tt::TokenId` to the *relative* source range.
    /// Append-only list searched linearly on lookup; each entry is either a
    /// plain token range or a combined delimiter-pair range.
    entries: Vec<(tt::TokenId, TokenTextRange)>,
}
33 | |||
34 | impl TokenMap { | ||
35 | pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { | ||
36 | let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { | ||
37 | TokenTextRange::Token(it) => *it == relative_range, | ||
38 | TokenTextRange::Delimiter(it) => { | ||
39 | let open = TextRange::at(it.start(), 1.into()); | ||
40 | let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); | ||
41 | open == relative_range || close == relative_range | ||
42 | } | ||
43 | })?; | ||
44 | Some(token_id) | ||
45 | } | ||
46 | |||
47 | pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> { | ||
48 | let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; | ||
49 | range.by_kind(kind) | ||
50 | } | ||
51 | |||
52 | pub(crate) fn shrink_to_fit(&mut self) { | ||
53 | self.entries.shrink_to_fit(); | ||
54 | } | ||
55 | |||
56 | pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { | ||
57 | self.entries.push((token_id, TokenTextRange::Token(relative_range))); | ||
58 | } | ||
59 | |||
60 | pub(crate) fn insert_delim( | ||
61 | &mut self, | ||
62 | token_id: tt::TokenId, | ||
63 | open_relative_range: TextRange, | ||
64 | close_relative_range: TextRange, | ||
65 | ) -> usize { | ||
66 | let res = self.entries.len(); | ||
67 | let cover = open_relative_range.cover(close_relative_range); | ||
68 | |||
69 | self.entries.push((token_id, TokenTextRange::Delimiter(cover))); | ||
70 | res | ||
71 | } | ||
72 | |||
73 | pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { | ||
74 | let (_, token_text_range) = &mut self.entries[idx]; | ||
75 | if let TokenTextRange::Delimiter(dim) = token_text_range { | ||
76 | let cover = dim.cover(close_relative_range); | ||
77 | *token_text_range = TokenTextRange::Delimiter(cover); | ||
78 | } | ||
79 | } | ||
80 | |||
81 | pub(crate) fn remove_delim(&mut self, idx: usize) { | ||
82 | // FIXME: This could be accidentally quadratic | ||
83 | self.entries.remove(idx); | ||
84 | } | ||
85 | } | ||