Diffstat (limited to 'crates/mbe')
-rw-r--r--  crates/mbe/src/lib.rs           | 10
-rw-r--r--  crates/mbe/src/syntax_bridge.rs | 83
-rw-r--r--  crates/mbe/src/tests/expand.rs  |  5
-rw-r--r--  crates/mbe/src/token_map.rs     | 85
4 files changed, 97 insertions(+), 86 deletions(-)
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3af5bc18b..b95374b76 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -14,6 +14,7 @@ mod tests;
 
 #[cfg(test)]
 mod benchmark;
+mod token_map;
 
 use std::fmt;
 
@@ -63,9 +64,12 @@ impl fmt::Display for ExpandError {
     }
 }
 
-pub use crate::syntax_bridge::{
-    ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-    token_tree_to_syntax_node, TokenMap,
+pub use crate::{
+    syntax_bridge::{
+        ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+        token_tree_to_syntax_node,
+    },
+    token_map::TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
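
Note: the lib.rs hunk above only reshuffles the re-export, so the public path is unchanged. A minimal downstream sketch (hypothetical caller, not part of this patch), using the `syntax_node_to_token_tree` signature visible in the syntax_bridge.rs hunk below:

    use mbe::{syntax_node_to_token_tree, TokenMap};
    use syntax::SyntaxNode;

    // `mbe::TokenMap` still resolves at the crate root; only its defining
    // module moved from syntax_bridge.rs to token_map.rs.
    fn to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
        syntax_node_to_token_tree(node)
    }
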
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b13168bd3..b11172caf 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,36 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::ExpandError;
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum TokenTextRange {
-    Token(TextRange),
-    Delimiter(TextRange),
-}
-
-impl TokenTextRange {
-    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
-    }
-}
-
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-}
+use crate::{ExpandError, TokenMap};
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset);
     let subtree = c.go();
-    c.id_alloc.map.entries.shrink_to_fit();
+    c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
 }
 
@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }
 
-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
-    }
-
-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        Some(range)
-    }
-
-    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
-    }
-
-    fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
-    }
-
-    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
-    }
-
-    fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
-    }
-}
-
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> {
     }
 
     fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
-        self.token_map.entries.shrink_to_fit();
+        self.token_map.shrink_to_fit();
         (self.inner.finish(), self.token_map)
     }
 }
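
Note: with `entries` now private to token_map.rs, syntax_bridge.rs goes through the `pub(crate)` wrappers instead of the field, which is all the two `shrink_to_fit` changes above amount to. A crate-internal sketch (hypothetical helper, not the real `Convertor` or `TtTreeSink`):

    use syntax::TextRange;

    use crate::TokenMap;

    // Record one token's relative source range and seal the map, using only
    // the pub(crate) surface introduced by this patch.
    fn record_and_finish(id: tt::TokenId, relative_range: TextRange) -> TokenMap {
        let mut map = TokenMap::default();
        map.insert(id, relative_range);
        map.shrink_to_fit(); // replaces the old `map.entries.shrink_to_fit()`
        map
    }
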
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs
index 3a1d840ea..5f173f513 100644
--- a/crates/mbe/src/tests/expand.rs
+++ b/crates/mbe/src/tests/expand.rs
@@ -58,9 +58,8 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();
 
-    let get_text = |id, kind| -> String {
-        content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
-    };
+    let get_text =
+        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };
 
     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}
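
Note: the test now passes the `SyntaxKind` straight to `range_by_token`, since `by_kind` and `TokenTextRange` are no longer public. A minimal sketch of the new call shape from outside the crate (hypothetical function, not in the patch):

    use parser::T;
    use syntax::TextRange;

    // The SyntaxKind argument picks the plain token range, or the open/close
    // half of a delimiter pair that shares a single TokenId.
    fn open_brace_range(map: &mbe::TokenMap, id: tt::TokenId) -> Option<TextRange> {
        map.range_by_token(id, T!['{'])
    }
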
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
new file mode 100644
index 000000000..6df3de3b3
--- /dev/null
+++ b/crates/mbe/src/token_map.rs
@@ -0,0 +1,85 @@
+//! Mapping between `TokenId`s and the token's position in macro definitions or inputs.
+
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+    fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
+                _ => None,
+            },
+        }
+    }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+pub struct TokenMap {
+    /// Maps `tt::TokenId` to the *relative* source range.
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
+}
+
+impl TokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
+            }
+        })?;
+        Some(token_id)
+    }
+
+    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        range.by_kind(kind)
+    }
+
+    pub(crate) fn shrink_to_fit(&mut self) {
+        self.entries.shrink_to_fit();
+    }
+
+    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    }
+
+    pub(crate) fn insert_delim(
+        &mut self,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
+    ) -> usize {
+        let res = self.entries.len();
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+        res
+    }
+
+    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
+        }
+    }
+
+    pub(crate) fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidentally quadratic
+        self.entries.remove(idx);
+    }
+}
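
Note: for a delimiter entry, `by_kind` carves a one-byte range out of either end of the covering range. A worked sketch (hypothetical crate-internal test, not part of the patch): an entry built from open range 10..11 and close range 19..20 covers 10..20 and resolves back to the two brace positions, in either direction:

    #[cfg(test)]
    mod sketch {
        use parser::T;
        use syntax::TextRange;

        use crate::TokenMap;

        #[test]
        fn delimiter_round_trip() {
            let mut map = TokenMap::default();
            let id = tt::TokenId(0);
            let open = TextRange::new(10.into(), 11.into());
            let close = TextRange::new(19.into(), 20.into());
            // insert_delim stores the cover of the two ranges: 10..20.
            map.insert_delim(id, open, close);
            assert_eq!(map.range_by_token(id, T!['{']), Some(open));
            assert_eq!(map.range_by_token(id, T!['}']), Some(close));
            // token_by_range accepts either brace position and returns the id.
            assert_eq!(map.token_by_range(close), Some(id));
        }
    }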