author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2021-02-28 05:07:09 +0000
committer  GitHub <[email protected]>  2021-02-28 05:07:09 +0000
commit     e0437f899ceea242173c665f7d124c485b680a7a
tree       0e623f162d68d88de13c50cfe5b3ff05bc4bf27c  /crates
parent     cbec9958220a7ce5d51289e2fc59c2eb0754ac87
parent     f5bf1a9650089ec7bd0a4d3fb69706fab06da308
Merge #7802
7802: Fix builtin macros split exprs on comma r=edwin0cheng a=edwin0cheng
Fixes #7640
bors r+
Co-authored-by: Edwin Cheng <[email protected]>
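
The old `format_args_expand` split the macro input on every `,` leaf in the token tree. Angle brackets do not open a token-tree group, so the comma inside a turbofish call such as `a::<A,B>()` was mistaken for an argument separator and the call was torn in half (#7640). The patch replaces the manual split with a new `mbe::parse_exprs_with_sep` helper that asks the parser for one complete expression before looking for the separator. Roughly, as an illustrative trace (not the output of any tool):

    tokens inside format_args!(..):     "{} {:?}"  ,  a :: < A , B > ( )  ,  b
    old split on every ',' leaf:        ["{} {:?}"]  [a :: < A]  [B > ( )]  [b]
    new parse_exprs_with_sep(tt, ','):  ["{} {:?}"]  [a :: < A , B > ( )]  [b]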
Diffstat (limited to 'crates')

-rw-r--r--  crates/hir_expand/src/builtin_macro.rs  45
-rw-r--r--  crates/mbe/src/expander/matcher.rs      68
-rw-r--r--  crates/mbe/src/lib.rs                     4
-rw-r--r--  crates/mbe/src/syntax_bridge.rs          39
-rw-r--r--  crates/mbe/src/tt_iter.rs                77

5 files changed, 144 insertions, 89 deletions
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 57bc6fbd7..6fbb9570a 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -6,7 +6,7 @@ use crate::{
 
 use base_db::{AnchoredPath, FileId};
 use either::Either;
-use mbe::{parse_to_token_tree, ExpandResult};
+use mbe::{parse_exprs_with_sep, parse_to_token_tree, ExpandResult};
 use parser::FragmentKind;
 use syntax::ast::{self, AstToken};
 
@@ -238,35 +238,21 @@ fn format_args_expand(
     // ])
     // ```,
     // which is still not really correct, but close enough for now
-    let mut args = Vec::new();
-    let mut current = Vec::new();
-    for tt in tt.token_trees.iter().cloned() {
-        match tt {
-            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
-                args.push(current);
-                current = Vec::new();
-            }
-            _ => {
-                current.push(tt);
-            }
-        }
-    }
-    if !current.is_empty() {
-        args.push(current);
-    }
+    let mut args = parse_exprs_with_sep(tt, ',');
+
     if args.is_empty() {
         return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule);
     }
     for arg in &mut args {
         // Remove `key =`.
-        if matches!(arg.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' && p.spacing != tt::Spacing::Joint)
+        if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' && p.spacing != tt::Spacing::Joint)
         {
-            arg.drain(..2);
+            arg.token_trees.drain(..2);
         }
     }
     let _format_string = args.remove(0);
     let arg_tts = args.into_iter().flat_map(|arg| {
-        quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), }
+        quote! { std::fmt::ArgumentV1::new(&(#arg), std::fmt::Display::fmt), }
     }.token_trees).collect::<Vec<_>>();
     let expanded = quote! {
         std::fmt::Arguments::new_v1(&[], &[##arg_tts])
@@ -720,6 +706,25 @@ mod tests {
     }
 
     #[test]
+    fn test_format_args_expand_with_comma_exprs() {
+        let expanded = expand_builtin_macro(
+            r#"
+            #[rustc_builtin_macro]
+            macro_rules! format_args {
+                ($fmt:expr) => ({ /* compiler built-in */ });
+                ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+            }
+            format_args!("{} {:?}", a::<A,B>(), b);
+            "#,
+        );
+
+        assert_eq!(
+            expanded,
+            r#"std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a::<A,B>()),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(b),std::fmt::Display::fmt),])"#
+        );
+    }
+
+    #[test]
     fn test_include_bytes_expand() {
         let expanded = expand_builtin_macro(
             r#"
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 800931cd1..e3bd4c09a 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -3,15 +3,13 @@
 use crate::{
     expander::{Binding, Bindings, Fragment},
     parser::{Op, RepeatKind, Separator},
-    subtree_source::SubtreeTokenSource,
     tt_iter::TtIter,
     ExpandError, MetaTemplate,
 };
 
 use super::ExpandResult;
-use parser::{FragmentKind::*, TreeSink};
-use syntax::{SmolStr, SyntaxKind};
-use tt::buffer::{Cursor, TokenBuffer};
+use parser::FragmentKind::*;
+use syntax::SmolStr;
 
 impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
@@ -409,68 +407,6 @@ impl<'a> TtIter<'a> {
         .into())
     }
 
-    fn expect_fragment(
-        &mut self,
-        fragment_kind: parser::FragmentKind,
-    ) -> ExpandResult<Option<tt::TokenTree>> {
-        struct OffsetTokenSink<'a> {
-            cursor: Cursor<'a>,
-            error: bool,
-        }
-
-        impl<'a> TreeSink for OffsetTokenSink<'a> {
-            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
-                if kind == SyntaxKind::LIFETIME_IDENT {
-                    n_tokens = 2;
-                }
-                for _ in 0..n_tokens {
-                    self.cursor = self.cursor.bump_subtree();
-                }
-            }
-            fn start_node(&mut self, _kind: SyntaxKind) {}
-            fn finish_node(&mut self) {}
-            fn error(&mut self, _error: parser::ParseError) {
-                self.error = true;
-            }
-        }
-
-        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
-        let mut src = SubtreeTokenSource::new(&buffer);
-        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
-
-        parser::parse_fragment(&mut src, &mut sink, fragment_kind);
-
-        let mut err = None;
-        if !sink.cursor.is_root() || sink.error {
-            err = Some(err!("expected {:?}", fragment_kind));
-        }
-
-        let mut curr = buffer.begin();
-        let mut res = vec![];
-
-        if sink.cursor.is_root() {
-            while curr != sink.cursor {
-                if let Some(token) = curr.token_tree() {
-                    res.push(token);
-                }
-                curr = curr.bump();
-            }
-        }
-        self.inner = self.inner.as_slice()[res.len()..].iter();
-        if res.len() == 0 && err.is_none() {
-            err = Some(err!("no tokens consumed"));
-        }
-        let res = match res.len() {
-            1 => Some(res[0].cloned()),
-            0 => None,
-            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: None,
-                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
-            })),
-        };
-        ExpandResult { value: res, err }
-    }
-
     fn eat_vis(&mut self) -> Option<tt::TokenTree> {
         let mut fork = self.clone();
         match fork.expect_fragment(Visibility) {
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 6b4a4eb53..4c298f85f 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -65,8 +65,8 @@ impl fmt::Display for ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
-    TokenMap,
+    ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+    token_tree_to_syntax_node, TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 0cdc175be..5a91781fc 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,8 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
+use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum TokenTextRange {
@@ -112,6 +112,43 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
     Some((subtree, conv.id_alloc.map))
 }
 
+/// Split token tree with seperate expr: $($e:expr)SEP*
+pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+    if tt.token_trees.is_empty() {
+        return Vec::new();
+    }
+
+    let mut iter = TtIter::new(tt);
+    let mut res = Vec::new();
+
+    while iter.peek_n(0).is_some() {
+        let expanded = iter.expect_fragment(FragmentKind::Expr);
+        if expanded.err.is_some() {
+            break;
+        }
+
+        res.push(match expanded.value {
+            None => break,
+            Some(tt @ tt::TokenTree::Leaf(_)) => {
+                tt::Subtree { delimiter: None, token_trees: vec![tt.into()] }
+            }
+            Some(tt::TokenTree::Subtree(tt)) => tt,
+        });
+
+        let mut fork = iter.clone();
+        if fork.expect_char(sep).is_err() {
+            break;
+        }
+        iter = fork;
+    }
+
+    if iter.peek_n(0).is_some() {
+        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+    }
+
+    res
+}
+
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
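
The new helper leans on `TtIter::expect_fragment(FragmentKind::Expr)` (moved into tt_iter.rs in the next diff) to consume one whole expression at a time and only then looks for the separator, so a comma nested inside an expression can no longer split it. A minimal usage sketch in Rust, assuming the `parse_to_token_tree` helper from earlier in this file; the expected count of 3 is inferred from the new test above, not taken from the patch:

    // Sketch only: both functions are public in syntax_bridge.rs after this patch.
    let (tt, _map) = parse_to_token_tree(r#""{} {:?}", a::<A,B>(), b"#).unwrap();
    let exprs = parse_exprs_with_sep(&tt, ',');
    // Three complete expressions: the format string, `a::<A,B>()`, and `b`;
    // the comma inside the turbofish stays inside its call expression.
    assert_eq!(exprs.len(), 3);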
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 46c420718..a362d31fc 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -1,5 +1,20 @@
 //! FIXME: write short doc here
 
+use crate::{subtree_source::SubtreeTokenSource, ExpandError, ExpandResult};
+
+use parser::TreeSink;
+use syntax::SyntaxKind;
+use tt::buffer::{Cursor, TokenBuffer};
+
+macro_rules! err {
+    () => {
+        ExpandError::BindingError(format!(""))
+    };
+    ($($tt:tt)*) => {
+        ExpandError::BindingError(format!($($tt)*))
+    };
+}
+
 #[derive(Debug, Clone)]
 pub(crate) struct TtIter<'a> {
     pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
@@ -56,6 +71,68 @@ impl<'a> TtIter<'a> {
         }
     }
 
+    pub(crate) fn expect_fragment(
+        &mut self,
+        fragment_kind: parser::FragmentKind,
+    ) -> ExpandResult<Option<tt::TokenTree>> {
+        struct OffsetTokenSink<'a> {
+            cursor: Cursor<'a>,
+            error: bool,
+        }
+
+        impl<'a> TreeSink for OffsetTokenSink<'a> {
+            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+                if kind == SyntaxKind::LIFETIME_IDENT {
+                    n_tokens = 2;
+                }
+                for _ in 0..n_tokens {
+                    self.cursor = self.cursor.bump_subtree();
+                }
+            }
+            fn start_node(&mut self, _kind: SyntaxKind) {}
+            fn finish_node(&mut self) {}
+            fn error(&mut self, _error: parser::ParseError) {
+                self.error = true;
+            }
+        }
+
+        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
+        let mut src = SubtreeTokenSource::new(&buffer);
+        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
+
+        parser::parse_fragment(&mut src, &mut sink, fragment_kind);
+
+        let mut err = None;
+        if !sink.cursor.is_root() || sink.error {
+            err = Some(err!("expected {:?}", fragment_kind));
+        }
+
+        let mut curr = buffer.begin();
+        let mut res = vec![];
+
+        if sink.cursor.is_root() {
+            while curr != sink.cursor {
+                if let Some(token) = curr.token_tree() {
+                    res.push(token);
+                }
+                curr = curr.bump();
+            }
+        }
+        self.inner = self.inner.as_slice()[res.len()..].iter();
+        if res.len() == 0 && err.is_none() {
+            err = Some(err!("no tokens consumed"));
+        }
+        let res = match res.len() {
+            1 => Some(res[0].cloned()),
+            0 => None,
+            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
+                delimiter: None,
+                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
+            })),
+        };
+        ExpandResult { value: res, err }
+    }
+
     pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> {
         self.inner.as_slice().get(n)
     }