From 83b2d78bbbe06cf7e00f8de732f8bd4264b82a46 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Wed, 1 Jan 2020 02:19:59 +0800 Subject: Supporting extend selection inside macro calls --- crates/ra_ide/src/extend_selection.rs | 123 ++++++++++++++++++++++++++++++---- 1 file changed, 109 insertions(+), 14 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 1ec41a117..c6f558021 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -4,20 +4,27 @@ use ra_db::SourceDatabase; use ra_syntax::{ algo::find_covering_element, ast::{self, AstNode, AstToken}, - Direction, NodeOrToken, + Direction, NodeOrToken, SyntaxElement, SyntaxKind::{self, *}, SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, }; use crate::{db::RootDatabase, FileRange}; +use hir::{db::AstDatabase, InFile}; +use itertools::Itertools; // FIXME: restore macro support pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { - let parse = db.parse(frange.file_id); - try_extend_selection(parse.tree().syntax(), frange.range).unwrap_or(frange.range) + let src = db.parse(frange.file_id).tree(); + let root = InFile::new(frange.file_id.into(), src.syntax()); + try_extend_selection(db, root, frange.range).unwrap_or(frange.range) } -fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option { +fn try_extend_selection( + db: &RootDatabase, + root: InFile<&SyntaxNode>, + range: TextRange, +) -> Option { let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; let list_kinds = [ RECORD_FIELD_PAT_LIST, @@ -40,9 +47,9 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option return None, @@ -58,7 +65,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option { if token.text_range() != range { return Some(token.text_range()); @@ -72,6 +79,16 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option node, }; + + // if we are in single token_tree, we maybe live in macro or attr + if node.kind() == TOKEN_TREE { + if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { + if let Some(range) = extend_tokens_from_range(db, &root, macro_call, range) { + return Some(range); + } + } + } + if node.text_range() != range { return Some(node.text_range()); } @@ -88,6 +105,67 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option, + macro_call: ast::MacroCall, + original_range: TextRange, +) -> Option { + let analyzer = hir::SourceAnalyzer::new(db, root.clone(), None); + let expansion = analyzer.expand(db, root.with_value(¯o_call))?; + + // compute original mapped token range + let range = macro_call + .syntax() + .descendants_with_tokens() + .filter_map(|n| match n { + NodeOrToken::Token(token) if token.text_range().is_subrange(&original_range) => { + expansion + .map_token_down(db, root.with_value(&token)) + .map(|node| node.value.text_range()) + } + _ => None, + }) + .fold1(|x, y| union_range(x, y))?; + + let src = db.parse_or_expand(expansion.file_id())?; + let parent = shallow_node(&find_covering_element(&src, range))?.parent()?; + + // compute parent mapped token range + let range = macro_call + .syntax() + .descendants_with_tokens() + .filter_map(|n| match n { + NodeOrToken::Token(token) => { + expansion.map_token_down(db, root.with_value(&token)).and_then(|node| { + if node.value.text_range().is_subrange(&parent.text_range()) { + Some(token.text_range()) + } else { + None + } + }) + } + _ => None, + 
}) + .fold1(|x, y| union_range(x, y))?; + + if original_range.is_subrange(&range) && original_range != range { + Some(range) + } else { + None + } +} + +fn union_range(range: TextRange, r: TextRange) -> TextRange { + let start = range.start().min(r.start()); + let end = range.end().max(r.end()); + TextRange::from_to(start, end) +} + +fn shallow_node(node: &SyntaxElement) -> Option { + node.ancestors().take_while(|n| n.text_range() == node.text_range()).last() +} + fn extend_single_word_in_comment_or_string( leaf: &SyntaxToken, offset: TextUnit, @@ -227,18 +305,19 @@ fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { #[cfg(test)] mod tests { - use ra_syntax::{AstNode, SourceFile}; - use test_utils::extract_offset; - use super::*; + use crate::mock_analysis::single_file; + use test_utils::extract_offset; fn do_check(before: &str, afters: &[&str]) { let (cursor, before) = extract_offset(before); - let parse = SourceFile::parse(&before); - let mut range = TextRange::offset_len(cursor, 0.into()); + let (analysis, file_id) = single_file(&before); + let range = TextRange::offset_len(cursor, 0.into()); + let mut frange = FileRange { file_id: file_id, range }; + for &after in afters { - range = try_extend_selection(parse.tree().syntax(), range).unwrap(); - let actual = &before[range]; + frange.range = analysis.extend_selection(frange).unwrap(); + let actual = &before[frange.range]; assert_eq!(after, actual); } } @@ -503,4 +582,20 @@ fn main() { let var = ( ], ); } + + #[test] + fn extend_selection_inside_macros() { + do_check( + r#"macro_rules! foo { ($item:item) => {$item} } + foo!{fn hello(na<|>me:usize){}}"#, + &[ + "name", + "name:usize", + "(name:usize)", + "fn hello(name:usize){}", + "{fn hello(name:usize){}}", + "foo!{fn hello(name:usize){}}", + ], + ); + } } -- cgit v1.2.3 From 4c4416543ab14c9a0a246907f41be0658f97c6fc Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Wed, 1 Jan 2020 02:48:19 +0800 Subject: Fix formatting --- crates/ra_ide/src/extend_selection.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index c6f558021..e48ef8649 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -113,7 +113,7 @@ fn extend_tokens_from_range( ) -> Option { let analyzer = hir::SourceAnalyzer::new(db, root.clone(), None); let expansion = analyzer.expand(db, root.with_value(¯o_call))?; - + // compute original mapped token range let range = macro_call .syntax() -- cgit v1.2.3 From 07f4171b1803f562118671255d73b97f20d24e07 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Tue, 7 Jan 2020 04:35:19 +0800 Subject: Minor fix --- crates/ra_ide/src/extend_selection.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index e48ef8649..9b6cc15e8 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -13,7 +13,6 @@ use crate::{db::RootDatabase, FileRange}; use hir::{db::AstDatabase, InFile}; use itertools::Itertools; -// FIXME: restore macro support pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { let src = db.parse(frange.file_id).tree(); let root = InFile::new(frange.file_id.into(), src.syntax()); @@ -93,8 +92,7 @@ fn try_extend_selection( return Some(node.text_range()); } - // Using shallowest node with same range allows us to traverse siblings. 
- let node = node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap(); + let node = shallowest_node(&node.into()).unwrap(); if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { if let Some(range) = extend_list_item(&node) { @@ -129,7 +127,7 @@ fn extend_tokens_from_range( .fold1(|x, y| union_range(x, y))?; let src = db.parse_or_expand(expansion.file_id())?; - let parent = shallow_node(&find_covering_element(&src, range))?.parent()?; + let parent = shallowest_node(&find_covering_element(&src, range))?.parent()?; // compute parent mapped token range let range = macro_call @@ -162,7 +160,8 @@ fn union_range(range: TextRange, r: TextRange) -> TextRange { TextRange::from_to(start, end) } -fn shallow_node(node: &SyntaxElement) -> Option { +/// Find the shallowest node with same range, which allows us to traverse siblings. +fn shallowest_node(node: &SyntaxElement) -> Option { node.ancestors().take_while(|n| n.text_range() == node.text_range()).last() } -- cgit v1.2.3 From b30e6a7b56942f31cbd7b9fdb78925bf5b65b247 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Wed, 8 Jan 2020 00:22:08 +0800 Subject: Handle extend selection in recursive macro --- crates/ra_ide/src/extend_selection.rs | 76 ++++++++++++++++++++++------------- 1 file changed, 49 insertions(+), 27 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 9b6cc15e8..a9ad4b476 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -9,21 +9,22 @@ use ra_syntax::{ SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, }; -use crate::{db::RootDatabase, FileRange}; -use hir::{db::AstDatabase, InFile}; +use crate::{db::RootDatabase, expand::descend_into_macros, FileId, FileRange}; +use hir::db::AstDatabase; use itertools::Itertools; pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { let src = db.parse(frange.file_id).tree(); - let root = InFile::new(frange.file_id.into(), src.syntax()); - try_extend_selection(db, root, frange.range).unwrap_or(frange.range) + try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range) } fn try_extend_selection( db: &RootDatabase, - root: InFile<&SyntaxNode>, - range: TextRange, + root: &SyntaxNode, + frange: FileRange, ) -> Option { + let range = frange.range; + let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; let list_kinds = [ RECORD_FIELD_PAT_LIST, @@ -46,9 +47,9 @@ fn try_extend_selection( if range.is_empty() { let offset = range.start(); - let mut leaves = root.value.token_at_offset(offset); + let mut leaves = root.token_at_offset(offset); if leaves.clone().all(|it| it.kind() == WHITESPACE) { - return Some(extend_ws(root.value, leaves.next()?, offset)); + return Some(extend_ws(root, leaves.next()?, offset)); } let leaf_range = match leaves { TokenAtOffset::None => return None, @@ -64,7 +65,7 @@ fn try_extend_selection( }; return Some(leaf_range); }; - let node = match find_covering_element(root.value, range) { + let node = match find_covering_element(root, range) { NodeOrToken::Token(token) => { if token.text_range() != range { return Some(token.text_range()); @@ -82,7 +83,7 @@ fn try_extend_selection( // if we are in single token_tree, we maybe live in macro or attr if node.kind() == TOKEN_TREE { if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { - if let Some(range) = extend_tokens_from_range(db, &root, macro_call, range) { + if let Some(range) = 
extend_tokens_from_range(db, frange.file_id, macro_call, range) { return Some(range); } } @@ -105,48 +106,52 @@ fn try_extend_selection( fn extend_tokens_from_range( db: &RootDatabase, - root: &InFile<&SyntaxNode>, + file_id: FileId, macro_call: ast::MacroCall, original_range: TextRange, ) -> Option { - let analyzer = hir::SourceAnalyzer::new(db, root.clone(), None); - let expansion = analyzer.expand(db, root.with_value(¯o_call))?; - // compute original mapped token range + let mut expanded = None; let range = macro_call .syntax() .descendants_with_tokens() .filter_map(|n| match n { NodeOrToken::Token(token) if token.text_range().is_subrange(&original_range) => { - expansion - .map_token_down(db, root.with_value(&token)) - .map(|node| node.value.text_range()) + let node = descend_into_macros(db, file_id, token); + match node.file_id { + it if it == file_id.into() => None, + it if expanded.is_none() || expanded == Some(it) => { + expanded = Some(it.into()); + Some(node.value.text_range()) + } + _ => None, + } } _ => None, }) .fold1(|x, y| union_range(x, y))?; - let src = db.parse_or_expand(expansion.file_id())?; + let expanded = expanded?; + let src = db.parse_or_expand(expanded)?; let parent = shallowest_node(&find_covering_element(&src, range))?.parent()?; - // compute parent mapped token range let range = macro_call .syntax() .descendants_with_tokens() .filter_map(|n| match n { NodeOrToken::Token(token) => { - expansion.map_token_down(db, root.with_value(&token)).and_then(|node| { - if node.value.text_range().is_subrange(&parent.text_range()) { - Some(token.text_range()) - } else { - None - } - }) + let node = descend_into_macros(db, file_id, token.clone()); + if node.file_id == expanded + && node.value.text_range().is_subrange(&parent.text_range()) + { + Some(token.text_range()) + } else { + None + } } _ => None, }) .fold1(|x, y| union_range(x, y))?; - if original_range.is_subrange(&range) && original_range != range { Some(range) } else { @@ -597,4 +602,21 @@ fn main() { let var = ( ], ); } + + #[test] + fn extend_selection_inside_recur_macros() { + do_check( + r#" macro_rules! foo2 { ($item:item) => {$item} } + macro_rules! 
foo { ($item:item) => {foo2!($item);} } + foo!{fn hello(na<|>me:usize){}}"#, + &[ + "name", + "name:usize", + "(name:usize)", + "fn hello(name:usize){}", + "{fn hello(name:usize){}}", + "foo!{fn hello(name:usize){}}", + ], + ); + } } -- cgit v1.2.3 From caed836e417a239ae1e384f7e977352b846a3804 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Thu, 9 Jan 2020 04:03:50 +0800 Subject: Use first and last token only --- crates/ra_ide/src/extend_selection.rs | 91 +++++++++++++++++++++-------------- 1 file changed, 56 insertions(+), 35 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index a9ad4b476..dc1a625ed 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -110,48 +110,69 @@ fn extend_tokens_from_range( macro_call: ast::MacroCall, original_range: TextRange, ) -> Option { - // compute original mapped token range - let mut expanded = None; - let range = macro_call + // Find all non-whitespace tokens under MacroCall + let all_tokens: Vec<_> = macro_call .syntax() .descendants_with_tokens() - .filter_map(|n| match n { - NodeOrToken::Token(token) if token.text_range().is_subrange(&original_range) => { - let node = descend_into_macros(db, file_id, token); - match node.file_id { - it if it == file_id.into() => None, - it if expanded.is_none() || expanded == Some(it) => { - expanded = Some(it.into()); - Some(node.value.text_range()) - } - _ => None, - } + .filter_map(|n| { + let token = n.as_token()?; + if token.kind() == WHITESPACE { + None + } else { + Some(token.clone()) } - _ => None, }) - .fold1(|x, y| union_range(x, y))?; - - let expanded = expanded?; - let src = db.parse_or_expand(expanded)?; - let parent = shallowest_node(&find_covering_element(&src, range))?.parent()?; - // compute parent mapped token range - let range = macro_call - .syntax() - .descendants_with_tokens() - .filter_map(|n| match n { - NodeOrToken::Token(token) => { - let node = descend_into_macros(db, file_id, token.clone()); - if node.file_id == expanded - && node.value.text_range().is_subrange(&parent.text_range()) - { - Some(token.text_range()) + .sorted_by(|a, b| Ord::cmp(&a.text_range().start(), &b.text_range().start())) + .collect(); + + // Get all indices which is in original range + let indices: Vec<_> = + all_tokens + .iter() + .enumerate() + .filter_map(|(i, token)| { + if token.text_range().is_subrange(&original_range) { + Some(i) } else { None } - } - _ => None, - }) - .fold1(|x, y| union_range(x, y))?; + }) + .collect(); + + // Compute the first and last token index in original_range + let first_idx = *indices.iter().min_by_key(|&&idx| all_tokens[idx].text_range().start())?; + let last_idx = *indices.iter().max_by_key(|&&idx| all_tokens[idx].text_range().end())?; + + // compute original mapped token range + let expanded = { + let first_node = descend_into_macros(db, file_id, all_tokens[first_idx].clone()); + let first_node = first_node.map(|it| it.text_range()); + + let last_node = descend_into_macros(db, file_id, all_tokens[last_idx].clone()); + if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id { + return None; + } + first_node.map(|it| union_range(it, last_node.value.text_range())) + }; + + // Compute parent node range + let src = db.parse_or_expand(expanded.file_id)?; + let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?; + + let validate = |&idx: &usize| { + let token: &SyntaxToken = &all_tokens[idx]; + let node = descend_into_macros(db, 
file_id, token.clone()); + + node.file_id == expanded.file_id + && node.value.text_range().is_subrange(&parent.text_range()) + }; + + // Find the first and last text range under expanded parent + let first = (0..=first_idx).rev().take_while(validate).last()?; + let last = (last_idx..all_tokens.len()).take_while(validate).last()?; + + let range = union_range(all_tokens[first].text_range(), all_tokens[last].text_range()); + if original_range.is_subrange(&range) && original_range != range { Some(range) } else { -- cgit v1.2.3 From b7ab0792114fe66c61c921b08f6262123fb8ddd0 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Thu, 9 Jan 2020 05:06:15 +0800 Subject: Use indices first and last instead of min-max --- crates/ra_ide/src/extend_selection.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index dc1a625ed..8048c7be9 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -139,9 +139,10 @@ fn extend_tokens_from_range( }) .collect(); - // Compute the first and last token index in original_range - let first_idx = *indices.iter().min_by_key(|&&idx| all_tokens[idx].text_range().start())?; - let last_idx = *indices.iter().max_by_key(|&&idx| all_tokens[idx].text_range().end())?; + // The first and last token index in original_range + // Note that the indices is sorted + let first_idx = *indices.first()?; + let last_idx = *indices.last()?; // compute original mapped token range let expanded = { -- cgit v1.2.3 From 384e1ced885eccc43c27153c7fe0d7b181616a0b Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Fri, 10 Jan 2020 20:36:09 +0800 Subject: Use prev_token and next_token --- crates/ra_ide/src/extend_selection.rs | 91 ++++++++++++++++++----------------- 1 file changed, 47 insertions(+), 44 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 8048c7be9..9b6bbe82d 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -11,7 +11,7 @@ use ra_syntax::{ use crate::{db::RootDatabase, expand::descend_into_macros, FileId, FileRange}; use hir::db::AstDatabase; -use itertools::Itertools; +use std::iter::successors; pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { let src = db.parse(frange.file_id).tree(); @@ -110,46 +110,28 @@ fn extend_tokens_from_range( macro_call: ast::MacroCall, original_range: TextRange, ) -> Option { - // Find all non-whitespace tokens under MacroCall - let all_tokens: Vec<_> = macro_call - .syntax() - .descendants_with_tokens() - .filter_map(|n| { - let token = n.as_token()?; - if token.kind() == WHITESPACE { - None - } else { - Some(token.clone()) - } - }) - .sorted_by(|a, b| Ord::cmp(&a.text_range().start(), &b.text_range().start())) - .collect(); - - // Get all indices which is in original range - let indices: Vec<_> = - all_tokens - .iter() - .enumerate() - .filter_map(|(i, token)| { - if token.text_range().is_subrange(&original_range) { - Some(i) - } else { - None - } - }) - .collect(); + let src = find_covering_element(¯o_call.syntax(), original_range); + let (first_token, last_token) = match src { + NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?), + NodeOrToken::Token(it) => (it.clone(), it), + }; + + let mut first_token = skip_whitespace(first_token, Direction::Next)?; + let mut last_token = skip_whitespace(last_token, Direction::Prev)?; - // The first and last token index in 
original_range - // Note that the indices is sorted - let first_idx = *indices.first()?; - let last_idx = *indices.last()?; + while !first_token.text_range().is_subrange(&original_range) { + first_token = skip_whitespace(first_token.next_token()?, Direction::Next)?; + } + while !last_token.text_range().is_subrange(&original_range) { + last_token = skip_whitespace(last_token.prev_token()?, Direction::Prev)?; + } // compute original mapped token range let expanded = { - let first_node = descend_into_macros(db, file_id, all_tokens[first_idx].clone()); + let first_node = descend_into_macros(db, file_id, first_token.clone()); let first_node = first_node.map(|it| it.text_range()); - let last_node = descend_into_macros(db, file_id, all_tokens[last_idx].clone()); + let last_node = descend_into_macros(db, file_id, last_token.clone()); if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id { return None; } @@ -160,20 +142,28 @@ fn extend_tokens_from_range( let src = db.parse_or_expand(expanded.file_id)?; let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?; - let validate = |&idx: &usize| { - let token: &SyntaxToken = &all_tokens[idx]; + let validate = |token: SyntaxToken| { let node = descend_into_macros(db, file_id, token.clone()); - - node.file_id == expanded.file_id + if node.file_id == expanded.file_id && node.value.text_range().is_subrange(&parent.text_range()) + { + Some(token) + } else { + None + } }; // Find the first and last text range under expanded parent - let first = (0..=first_idx).rev().take_while(validate).last()?; - let last = (last_idx..all_tokens.len()).take_while(validate).last()?; - - let range = union_range(all_tokens[first].text_range(), all_tokens[last].text_range()); - + let first = successors(Some(first_token), |token| { + validate(skip_whitespace(token.prev_token()?, Direction::Prev)?) + }) + .last()?; + let last = successors(Some(last_token), |token| { + validate(skip_whitespace(token.next_token()?, Direction::Next)?) 
+ }) + .last()?; + + let range = union_range(first.text_range(), last.text_range()); if original_range.is_subrange(&range) && original_range != range { Some(range) } else { @@ -181,6 +171,19 @@ fn extend_tokens_from_range( } } +fn skip_whitespace( + mut token: SyntaxToken, + direction: Direction, +) -> Option { + while token.kind() == WHITESPACE { + token = match direction { + Direction::Next => token.next_token()?, + Direction::Prev => token.prev_token()?, + } + } + Some(token) +} + fn union_range(range: TextRange, r: TextRange) -> TextRange { let start = range.start().min(r.start()); let end = range.end().max(r.end()); -- cgit v1.2.3 From 0593da9a36667e7780b62c6e2403497e7262bafe Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Fri, 10 Jan 2020 20:39:05 +0800 Subject: Fix format --- crates/ra_ide/src/extend_selection.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 9b6bbe82d..70b6fde82 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -171,14 +171,11 @@ fn extend_tokens_from_range( } } -fn skip_whitespace( - mut token: SyntaxToken, - direction: Direction, -) -> Option { +fn skip_whitespace(mut token: SyntaxToken, direction: Direction) -> Option { while token.kind() == WHITESPACE { token = match direction { Direction::Next => token.next_token()?, - Direction::Prev => token.prev_token()?, + Direction::Prev => token.prev_token()?, } } Some(token) -- cgit v1.2.3
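
Note on the end state: after the last three patches, extend_tokens_from_range settles into one shape that is easier to follow in a single piece than across the hunks. The sketch below is condensed from the diffs above and lightly re-commented; it is not guaranteed to match the committed file token-for-token, and it only makes sense inside ra_ide at this revision, since it assumes the same crate-internal items the hunks use (descend_into_macros, find_covering_element, InFile, plus the small helpers reproduced at the bottom).

    use std::iter::successors;

    use hir::db::AstDatabase;
    use ra_syntax::{
        algo::find_covering_element, ast, Direction, NodeOrToken, SyntaxElement,
        SyntaxKind::WHITESPACE, SyntaxNode, SyntaxToken, TextRange,
    };

    use crate::{db::RootDatabase, expand::descend_into_macros, FileId};

    fn extend_tokens_from_range(
        db: &RootDatabase,
        file_id: FileId,
        macro_call: ast::MacroCall,
        original_range: TextRange,
    ) -> Option<TextRange> {
        // Take the first/last non-whitespace tokens of the covering element that
        // still fall inside the original selection.
        let src = find_covering_element(&macro_call.syntax(), original_range);
        let (first_token, last_token) = match src {
            NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?),
            NodeOrToken::Token(it) => (it.clone(), it),
        };
        let mut first_token = skip_whitespace(first_token, Direction::Next)?;
        let mut last_token = skip_whitespace(last_token, Direction::Prev)?;
        while !first_token.text_range().is_subrange(&original_range) {
            first_token = skip_whitespace(first_token.next_token()?, Direction::Next)?;
        }
        while !last_token.text_range().is_subrange(&original_range) {
            last_token = skip_whitespace(last_token.prev_token()?, Direction::Prev)?;
        }

        // Map both ends down into the macro expansion; give up if an end stays in
        // the original file or the two ends land in different expansions.
        let expanded = {
            let first_node = descend_into_macros(db, file_id, first_token.clone());
            let first_node = first_node.map(|it| it.text_range());
            let last_node = descend_into_macros(db, file_id, last_token.clone());
            if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
                return None;
            }
            first_node.map(|it| union_range(it, last_node.value.text_range()))
        };

        // The extension target is the parent of the shallowest covering node in
        // the *expanded* tree.
        let src = db.parse_or_expand(expanded.file_id)?;
        let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;

        // A token still belongs to the extension if it maps into the same
        // expansion and its mapped range stays under that parent.
        let validate = |token: SyntaxToken| {
            let node = descend_into_macros(db, file_id, token.clone());
            if node.file_id == expanded.file_id
                && node.value.text_range().is_subrange(&parent.text_range())
            {
                Some(token)
            } else {
                None
            }
        };

        // Walk outwards in the original file while the neighbours keep
        // validating, then glue the two ends together.
        let first = successors(Some(first_token), |token| {
            validate(skip_whitespace(token.prev_token()?, Direction::Prev)?)
        })
        .last()?;
        let last = successors(Some(last_token), |token| {
            validate(skip_whitespace(token.next_token()?, Direction::Next)?)
        })
        .last()?;

        let range = union_range(first.text_range(), last.text_range());
        // Only report a strictly larger selection.
        if original_range.is_subrange(&range) && original_range != range {
            Some(range)
        } else {
            None
        }
    }

    fn skip_whitespace(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
        while token.kind() == WHITESPACE {
            token = match direction {
                Direction::Next => token.next_token()?,
                Direction::Prev => token.prev_token()?,
            }
        }
        Some(token)
    }

    fn union_range(range: TextRange, r: TextRange) -> TextRange {
        TextRange::from_to(range.start().min(r.start()), range.end().max(r.end()))
    }

    /// Shallowest node with the same range, so that siblings can be traversed.
    fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> {
        node.ancestors().take_while(|n| n.text_range() == node.text_range()).last()
    }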
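
One detail worth calling out: union_range (and the fold1 over it in the earlier revisions) returns the smallest range covering both inputs, gap included, which is what lets the first and last mapped tokens stand in for the whole stretch of the selection. A minimal self-contained check of that behavior, assuming only the TextRange/TextUnit re-exports that the diffs' own `use ra_syntax::{...}` line already relies on:

    use ra_syntax::{TextRange, TextUnit};

    /// Same helper as in the patches: the smallest range covering both inputs.
    fn union_range(range: TextRange, r: TextRange) -> TextRange {
        TextRange::from_to(range.start().min(r.start()), range.end().max(r.end()))
    }

    fn main() {
        let a = TextRange::from_to(TextUnit::from(4), TextUnit::from(9));
        let b = TextRange::from_to(TextUnit::from(12), TextUnit::from(20));
        let u = union_range(a, b);

        // Both inputs are subranges of the union, and so is the gap between them.
        assert!(a.is_subrange(&u) && b.is_subrange(&u));
        assert_eq!(u, TextRange::from_to(TextUnit::from(4), TextUnit::from(20)));
    }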