-rw-r--r--  crates/cfg/src/tests.rs                          8
-rw-r--r--  crates/hir_def/src/attr.rs                       2
-rw-r--r--  crates/hir_expand/src/builtin_macro.rs           2
-rw-r--r--  crates/hir_expand/src/db.rs                     14
-rw-r--r--  crates/hir_expand/src/eager.rs                   4
-rw-r--r--  crates/mbe/src/benchmark.rs                      2
-rw-r--r--  crates/mbe/src/expander.rs                       6
-rw-r--r--  crates/mbe/src/syntax_bridge.rs                 20
-rw-r--r--  crates/mbe/src/tests.rs                         11
-rw-r--r--  crates/mbe/src/tests/rule.rs                     2
-rw-r--r--  crates/rust-analyzer/src/cargo_target_spec.rs    2
11 files changed, 32 insertions, 41 deletions
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index bd0f9ec48..d8736c893 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -8,7 +8,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt).unwrap()
+        ast_to_token_tree(&tt)
     };
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
@@ -18,7 +18,7 @@ fn check_dnf(input: &str, expect: Expect) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt).unwrap()
+        ast_to_token_tree(&tt)
     };
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
@@ -29,7 +29,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt).unwrap()
+        ast_to_token_tree(&tt)
     };
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
@@ -42,7 +42,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let (tt, _) = {
         let source_file = ast::SourceFile::parse(input).ok().unwrap();
         let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        ast_to_token_tree(&tt).unwrap()
+        ast_to_token_tree(&tt)
     };
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index 2bab121d9..442c5fb5b 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -533,7 +533,7 @@ impl Attr {
             };
             Some(AttrInput::Literal(value))
         } else if let Some(tt) = ast.token_tree() {
-            Some(AttrInput::TokenTree(ast_to_token_tree(&tt)?.0))
+            Some(AttrInput::TokenTree(ast_to_token_tree(&tt).0))
         } else {
             None
         };
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 3aa3d8997..75ec4196b 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -584,7 +584,7 @@ mod tests {
         };
 
         let args = macro_call.token_tree().unwrap();
-        let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0;
+        let parsed_args = mbe::ast_to_token_tree(&args).0;
         let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(&macro_call));
 
         let arg_id = db.intern_eager_expansion({
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index c0ab70b60..10fe60821 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -119,7 +119,7 @@ pub fn expand_hypothetical(
     token_to_map: syntax::SyntaxToken,
 ) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
     let macro_file = MacroFile { macro_call_id: actual_macro_call };
-    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
+    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
     let range =
         token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
     let token_id = tmap_1.token_by_range(range)?;
@@ -143,10 +143,7 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
         MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
             syntax::ast::Macro::MacroRules(macro_rules) => {
                 let arg = macro_rules.token_tree()?;
-                let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
-                    log::warn!("fail on macro_rules to token tree: {:#?}", arg);
-                    None
-                })?;
+                let (tt, tmap) = mbe::ast_to_token_tree(&arg);
                 let rules = match MacroRules::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
@@ -159,10 +156,7 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
             }
             syntax::ast::Macro::MacroDef(macro_def) => {
                 let arg = macro_def.body()?;
-                let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
-                    log::warn!("fail on macro_def to token tree: {:#?}", arg);
-                    None
-                })?;
+                let (tt, tmap) = mbe::ast_to_token_tree(&arg);
                 let rules = match MacroDef::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
@@ -202,7 +196,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
 
 fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
     let arg = db.macro_arg_text(id)?;
-    let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?;
+    let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
     Some(Arc::new((tt, tmap)))
 }
 
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs
index 9eedc8461..9705526fa 100644
--- a/crates/hir_expand/src/eager.rs
+++ b/crates/hir_expand/src/eager.rs
@@ -106,7 +106,7 @@ pub fn expand_eager_macro(
     mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
 ) -> Result<EagerMacroId, ErrorEmitted> {
     let parsed_args = diagnostic_sink.option_with(
-        || Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?)?.0),
+        || Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?).0),
         || err("malformed macro invocation"),
     )?;
 
@@ -161,7 +161,7 @@ pub fn expand_eager_macro(
 }
 
 fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
-    let mut subtree = mbe::syntax_node_to_token_tree(node)?.0;
+    let mut subtree = mbe::syntax_node_to_token_tree(node).0;
     subtree.delimiter = None;
     Some(subtree)
 }
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index ba814a2e1..38707ffa5 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -65,7 +65,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap()).unwrap();
+            let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap());
             (id, def_tt)
         })
         .collect()
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index 3197c834c..bfef7f73d 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -159,8 +159,7 @@ mod tests {
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
 
-        let (definition_tt, _) =
-            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
@@ -169,8 +168,7 @@ mod tests {
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (invocation_tt, _) =
-            ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap());
 
         expand_rules(&rules.rules, &invocation_tt)
     }
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index ae0780072..9ba98f7fb 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -43,18 +43,18 @@ pub struct TokenMap {
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
-pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> {
+pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> (tt::Subtree, TokenMap) {
     syntax_node_to_token_tree(ast.syntax())
 }
 
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
-pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
+pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset);
-    let subtree = c.go()?;
+    let subtree = c.go();
     c.id_alloc.map.entries.shrink_to_fit();
-    Some((subtree, c.id_alloc.map))
+    (subtree, c.id_alloc.map)
 }
 
 // The following items are what `rustc` macro can be parsed into :
@@ -108,7 +108,7 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
         },
     };
 
-    let subtree = conv.go()?;
+    let subtree = conv.go();
     Some((subtree, conv.id_alloc.map))
 }
 
@@ -319,7 +319,7 @@ trait SrcToken: std::fmt::Debug {
 trait TokenConvertor {
     type Token: SrcToken;
 
-    fn go(&mut self) -> Option<tt::Subtree> {
+    fn go(&mut self) -> tt::Subtree {
         let mut subtree = tt::Subtree::default();
         subtree.delimiter = None;
         while self.peek().is_some() {
@@ -327,10 +327,10 @@ trait TokenConvertor {
         }
         if subtree.token_trees.len() == 1 {
             if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
-                return Some(first.clone());
+                return first.clone();
             }
         }
-        Some(subtree)
+        subtree
     }
 
     fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
@@ -858,7 +858,7 @@ mod tests {
         // - T!['}']
         // - WHITE_SPACE
         let token_tree = ast::TokenTree::cast(token_tree).unwrap();
-        let tt = ast_to_token_tree(&token_tree).unwrap().0;
+        let tt = ast_to_token_tree(&token_tree).0;
 
         assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
     }
@@ -867,7 +867,7 @@ mod tests {
     fn test_token_tree_multi_char_punct() {
         let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
         let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap();
-        let tt = ast_to_token_tree(&struct_def).unwrap().0;
+        let tt = ast_to_token_tree(&struct_def).0;
         token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
     }
 }
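The file above is the heart of the change: converting a syntax (sub)tree to a tt::Subtree can no longer fail, so the Option wrapper disappears and TokenConvertor::go returns the subtree directly. For reference, a minimal sketch of a caller written against the new signatures; it assumes the mbe, syntax, and tt crates from this repository, and the helper name first_token_tree is illustrative only, not an API introduced by this diff:

use syntax::{ast, AstNode};

// Convert the first `ast::TokenTree` found in `source` into a `tt::Subtree`.
// With the infallible signature there is nothing to unwrap on the conversion
// itself; only the parsing and casting steps can still fail.
fn first_token_tree(source: &str) -> (tt::Subtree, mbe::TokenMap) {
    let source_file = ast::SourceFile::parse(source).ok().unwrap();
    let token_tree =
        source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
    mbe::ast_to_token_tree(&token_tree)
}
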
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index 6da18ecf4..3698ff3f0 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -29,8 +29,7 @@ macro_rules! impl_fixture {
                 let macro_invocation =
                     source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-                let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap())
-                    .ok_or_else(|| ExpandError::ConversionError)?;
+                let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap());
 
                 self.rules.expand(&invocation_tt).result()
             }
@@ -101,7 +100,7 @@ macro_rules! impl_fixture {
                     .descendants()
                     .find_map(ast::TokenTree::cast)
                     .unwrap();
-                let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0;
+                let mut wrapped = ast_to_token_tree(&wrapped).0;
                 wrapped.delimiter = None;
                 wrapped
             };
@@ -151,7 +150,7 @@ pub(crate) fn parse_macro_error(ra_fixture: &str) -> ParseError {
 
 pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree {
     let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
-    let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0;
+    let tt = syntax_node_to_token_tree(source_file.syntax()).0;
 
     let parsed = parse_to_token_tree(ra_fixture).unwrap().0;
     assert_eq!(tt, parsed);
@@ -164,7 +163,7 @@ fn parse_macro_rules_to_tt(ra_fixture: &str) -> tt::Subtree {
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
 
     let parsed = parse_to_token_tree(
         &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
@@ -181,7 +180,7 @@ fn parse_macro_def_to_tt(ra_fixture: &str) -> tt::Subtree {
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroDef::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(&macro_definition.body().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(&macro_definition.body().unwrap());
 
     let parsed =
         parse_to_token_tree(&ra_fixture[macro_definition.body().unwrap().syntax().text_range()])
diff --git a/crates/mbe/src/tests/rule.rs b/crates/mbe/src/tests/rule.rs
index bf48112b3..5c61a98fd 100644
--- a/crates/mbe/src/tests/rule.rs
+++ b/crates/mbe/src/tests/rule.rs
@@ -44,6 +44,6 @@ fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap());
     crate::MacroRules::parse(&definition_tt)
 }
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs
index 5af0802a2..909c21532 100644
--- a/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -201,7 +201,7 @@ mod tests {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let (tt, _) = ast_to_token_tree(&tt).unwrap();
+            let (tt, _) = ast_to_token_tree(&tt);
             CfgExpr::parse(&tt)
         };
 
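The same simplification seen from the cfg side, mirroring the crates/cfg tests and the cargo_target_spec test above. A sketch assuming the cfg, mbe, and syntax crates in this tree; parse_cfg_attr is an illustrative helper name, not something added by this diff:

use cfg::CfgExpr;
use syntax::{ast, AstNode};

// Parse a `#![cfg(...)]` fragment into a `CfgExpr`. After this change the
// token-tree conversion returns the pair directly, so the only fallible steps
// left are parsing the source text and locating the token tree.
fn parse_cfg_attr(source: &str) -> CfgExpr {
    let source_file = ast::SourceFile::parse(source).ok().unwrap();
    let token_tree =
        source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
    let (tt, _) = mbe::ast_to_token_tree(&token_tree);
    CfgExpr::parse(&tt)
}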