author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2020-10-07 10:32:47 +0100
committer  GitHub <[email protected]>  2020-10-07 10:32:47 +0100
commit     faddea935332de3156a5462baa07136bf2e98bf9
tree       2bf4b95ff97c0cd9ed8a69ed63808fffffd521f2
parent     a184c83535230af39fad3e238b7422b4972d597e
parent     27798ee575a975a1806ced86aca8aea407897851
Merge #6158
6158: Fix for negative literals in macros r=matklad a=cutsoy

_This pull request fixes #6028._

When writing `-42.0f32` in Rust, it is usually parsed as two different tokens: a minus operator and a float literal. But a procedural macro can also generate new tokens, including negative [float literals](https://doc.rust-lang.org/stable/proc_macro/struct.Literal.html#method.f32_suffixed):

```rust
#[proc_macro]
pub fn example_verbose(input: TokenStream) -> TokenStream {
    let literal = Literal::f32_suffixed(-42.0);
    quote! { #literal }
}
```

or, even shorter:

```rust
#[proc_macro]
pub fn example(input: TokenStream) -> TokenStream {
    let literal = -42.0f32;
    quote! { #literal }
}
```

Unfortunately, these currently cause RA to crash:

```
thread '<unnamed>' panicked at 'Fail to convert given literal Literal {
    text: "-42.0f32",
    id: TokenId(
        4294967295,
    ),
}', crates/mbe/src/subtree_source.rs:161:28
```

This pull request contains both a fix 8cf9362 and a unit test 27798ee. In addition, I installed the patched server with `cargo xtask install --server` and verified in VSCode that it no longer crashes when a procedural macro returns a negative number literal.

Co-authored-by: Tim <[email protected]>
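The two token shapes described above can be compared outside a macro expansion with the proc-macro2 crate, which mirrors the `proc_macro` API on stable Rust (using it here is an assumption; the PR itself only exercises real proc macros). Parsing the source text `-42.0f32` yields a `-` punct followed by a positive literal, while `Literal::f32_suffixed(-42.0)` yields a single literal token whose text already carries the sign, which is exactly the shape that tripped up `convert_literal`. A minimal sketch:

```rust
// Minimal sketch, assuming proc-macro2 = "1" in Cargo.toml. It only contrasts the
// two shapes; the exact rendered text of the constructed literal (e.g. "-42f32"
// vs "-42.0f32") may vary by proc-macro2 version, so nothing is asserted here.
use std::str::FromStr;

use proc_macro2::{Literal, TokenStream, TokenTree};

fn main() {
    // Shape 1: parsing source text produces a `-` punct followed by a positive literal.
    let parsed = TokenStream::from_str("-42.0f32").unwrap();
    for tree in parsed {
        match tree {
            TokenTree::Punct(p) => println!("punct:   {}", p.as_char()),
            TokenTree::Literal(l) => println!("literal: {}", l),
            other => println!("other:   {}", other),
        }
    }

    // Shape 2: a literal constructed by a proc macro is a single token
    // whose text starts with '-'.
    let negative = Literal::f32_suffixed(-42.0);
    println!("single literal token: {}", negative);
}
```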
Diffstat (limited to 'crates')
-rw-r--r--  crates/mbe/src/subtree_source.rs | 38
1 file changed, 33 insertions(+), 5 deletions(-)
diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs
index 41461b315..226dc3bec 100644
--- a/crates/mbe/src/subtree_source.rs
+++ b/crates/mbe/src/subtree_source.rs
@@ -2,7 +2,7 @@
 
 use parser::{Token, TokenSource};
 use std::cell::{Cell, Ref, RefCell};
-use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
+use syntax::{tokenize, SmolStr, SyntaxKind, SyntaxKind::*, T};
 use tt::buffer::{Cursor, TokenBuffer};
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -155,10 +155,17 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
 }
 
 fn convert_literal(l: &tt::Literal) -> TtToken {
-    let kind = lex_single_syntax_kind(&l.text)
-        .map(|(kind, _error)| kind)
-        .filter(|kind| kind.is_literal())
-        .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
+    let mut kinds = tokenize(&l.text).0.into_iter().map(|token| token.kind);
+
+    let kind = match kinds.next() {
+        Some(kind) if kind.is_literal() => Some(kind),
+        Some(SyntaxKind::MINUS) => match kinds.next() {
+            Some(kind) if kind.is_literal() => Some(kind),
+            _ => None,
+        },
+        _ => None,
+    }
+    .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
 
     TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
 }
@@ -195,3 +202,24 @@ fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
         tt::Leaf::Punct(punct) => convert_punct(*punct),
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::{convert_literal, TtToken};
+    use syntax::{SmolStr, SyntaxKind};
+
+    #[test]
+    fn test_negative_literal() {
+        assert_eq!(
+            convert_literal(&tt::Literal {
+                id: tt::TokenId::unspecified(),
+                text: SmolStr::new("-42.0")
+            }),
+            TtToken {
+                kind: SyntaxKind::FLOAT_NUMBER,
+                is_joint_to_next: false,
+                text: SmolStr::new("-42.0")
+            }
+        );
+    }
+}
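Read as a whole, the new `convert_literal` accepts a literal whose text lexes either to a single literal token or to a leading `MINUS` followed by a literal, and still panics otherwise. A rough standalone sketch of that same fallback idea, with a hypothetical `classify` helper standing in for rust-analyzer's `tokenize` (it only recognises numeric literals, which is far cruder than the real lexer):

```rust
// Standalone sketch of the "leading minus" fallback; `classify` is a hypothetical,
// deliberately crude stand-in for rust-analyzer's `tokenize` and only looks at
// numeric literals.
#[derive(Debug, PartialEq)]
enum LiteralShape {
    Plain,    // e.g. "42.0f32"
    Negative, // e.g. "-42.0f32" (minus folded into the literal text)
    Invalid,  // anything else would still hit the panic path in convert_literal
}

fn classify(text: &str) -> LiteralShape {
    let starts_numeric = |s: &str| s.chars().next().map_or(false, |c| c.is_ascii_digit());
    match text.strip_prefix('-') {
        // One leading minus is fine as long as a literal follows it.
        Some(rest) if starts_numeric(rest) => LiteralShape::Negative,
        // No minus: the text must itself be a literal.
        None if starts_numeric(text) => LiteralShape::Plain,
        _ => LiteralShape::Invalid,
    }
}

fn main() {
    assert_eq!(classify("42.0f32"), LiteralShape::Plain);
    assert_eq!(classify("-42.0f32"), LiteralShape::Negative);
    assert_eq!(classify("--1"), LiteralShape::Invalid);
    println!("all shapes classified as expected");
}
```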