From 61f3a438d3a729a6be941bca1ff4c6a97a33f221 Mon Sep 17 00:00:00 2001 From: "Jeremy A. Kolb" Date: Mon, 15 Oct 2018 17:44:23 -0400 Subject: Cargo Format Run `cargo fmt` and ignore generated files --- crates/ra_editor/src/code_actions.rs | 49 +++++--- crates/ra_editor/src/completion.rs | 178 +++++++++++++++++---------- crates/ra_editor/src/edit.rs | 13 +- crates/ra_editor/src/extend_selection.rs | 58 ++++----- crates/ra_editor/src/folding_ranges.rs | 36 +++--- crates/ra_editor/src/lib.rs | 103 ++++++++-------- crates/ra_editor/src/line_index.rs | 119 +++++++++++++++--- crates/ra_editor/src/scope/fn_scope.rs | 150 +++++++++++++--------- crates/ra_editor/src/scope/mod.rs | 3 +- crates/ra_editor/src/scope/mod_scope.rs | 47 +++---- crates/ra_editor/src/symbols.rs | 34 +++-- crates/ra_editor/src/test_utils.rs | 12 +- crates/ra_editor/src/typing.rs | 205 ++++++++++++++++++------------- 13 files changed, 612 insertions(+), 395 deletions(-) (limited to 'crates/ra_editor/src') diff --git a/crates/ra_editor/src/code_actions.rs b/crates/ra_editor/src/code_actions.rs index 7b0a48c81..cadcd2720 100644 --- a/crates/ra_editor/src/code_actions.rs +++ b/crates/ra_editor/src/code_actions.rs @@ -1,17 +1,14 @@ use join_to_string::join; use ra_syntax::{ - File, TextUnit, TextRange, Direction, - ast::{self, AstNode, AttrsOwner, TypeParamsOwner, NameOwner}, + algo::{find_covering_node, find_leaf_at_offset}, + ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner}, + Direction, File, SyntaxKind::{COMMA, WHITESPACE}, - SyntaxNodeRef, - algo::{ - find_leaf_at_offset, - find_covering_node, - }, + SyntaxNodeRef, TextRange, TextUnit, }; -use crate::{EditBuilder, Edit, find_node_at_offset}; +use crate::{find_node_at_offset, Edit, EditBuilder}; #[derive(Debug)] pub struct LocalEdit { @@ -52,9 +49,7 @@ pub fn add_derive<'a>(file: &'a File, offset: TextUnit) -> Option { - tt.syntax().range().end() - TextUnit::of_char(')') - } + Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'), }; LocalEdit { edit: edit.finish(), @@ -74,14 +69,19 @@ pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option let mut buf = String::new(); buf.push_str("\n\nimpl"); if let Some(type_params) = type_params { - type_params.syntax().text() - .push_to(&mut buf); + type_params.syntax().text().push_to(&mut buf); } buf.push_str(" "); buf.push_str(name.text().as_str()); if let Some(type_params) = type_params { - let lifetime_params = type_params.lifetime_params().filter_map(|it| it.lifetime()).map(|it| it.text()); - let type_params = type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()); + let lifetime_params = type_params + .lifetime_params() + .filter_map(|it| it.lifetime()) + .map(|it| it.text()); + let type_params = type_params + .type_params() + .filter_map(|it| it.name()) + .map(|it| it.text()); join(lifetime_params.chain(type_params)) .surround_with("<", ">") .to_buf(&mut buf); @@ -97,10 +97,17 @@ pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option }) } -pub fn introduce_variable<'a>(file: &'a File, range: TextRange) -> Option LocalEdit + 'a> { +pub fn introduce_variable<'a>( + file: &'a File, + range: TextRange, +) -> Option LocalEdit + 'a> { let node = find_covering_node(file.syntax(), range); let expr = node.ancestors().filter_map(ast::Expr::cast).next()?; - let anchor_stmt = expr.syntax().ancestors().filter_map(ast::Stmt::cast).next()?; + let anchor_stmt = expr + .syntax() + .ancestors() + .filter_map(ast::Stmt::cast) + .next()?; let indent = anchor_stmt.syntax().prev_sibling()?; if 
indent.kind() != WHITESPACE { return None; @@ -191,7 +198,8 @@ mod tests { " fn foo() { foo(<|>1 + 1<|>); -}", " +}", + " fn foo() { let <|>var_name = 1 + 1; foo(var_name); @@ -201,11 +209,12 @@ fn foo() { } #[test] fn test_intrdoduce_var_expr_stmt() { -check_action_range( + check_action_range( " fn foo() { <|>1 + 1<|>; -}", " +}", + " fn foo() { let <|>var_name = 1 + 1; }", diff --git a/crates/ra_editor/src/completion.rs b/crates/ra_editor/src/completion.rs index b6095dca9..86ef46ebd 100644 --- a/crates/ra_editor/src/completion.rs +++ b/crates/ra_editor/src/completion.rs @@ -1,17 +1,18 @@ use rustc_hash::{FxHashMap, FxHashSet}; use ra_syntax::{ - File, TextUnit, AstNode, SyntaxNodeRef, SyntaxKind::*, + algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx}, ast::{self, LoopBodyOwner, ModuleItemOwner}, - algo::{ - visit::{visitor, Visitor, visitor_ctx, VisitorCtx}, - }, text_utils::is_subrange, + AstNode, File, + SyntaxKind::*, + SyntaxNodeRef, TextUnit, }; use crate::{ - AtomEdit, find_node_at_offset, + find_node_at_offset, scope::{FnScopes, ModuleScope}, + AtomEdit, }; #[derive(Debug)] @@ -21,7 +22,7 @@ pub struct CompletionItem { /// What string is used for filtering, defaults to label pub lookup: Option, /// What is inserted, defaults to label - pub snippet: Option + pub snippet: Option, } pub fn scope_completion(file: &File, offset: TextUnit) -> Option> { @@ -40,7 +41,12 @@ pub fn scope_completion(file: &File, offset: TextUnit) -> Option complete_mod_item_snippets(&mut res), _ => (), @@ -68,21 +74,23 @@ fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec(|it| Some(it.items())) .visit::(|it| Some(it.item_list()?.items())) - .accept(node) { + .accept(node) + { if let Some(items) = items { let scope = ModuleScope::new(items); acc.extend( - scope.entries().iter() + scope + .entries() + .iter() .filter(|entry| entry.syntax() != name_ref.syntax()) .map(|entry| CompletionItem { label: entry.name().to_string(), lookup: None, snippet: None, - }) + }), ); } break; - } else if !visited_fn { if let Some(fn_def) = ast::FnDef::cast(node) { visited_fn = true; @@ -103,26 +111,34 @@ fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec) { .visit::(process) .accept(node); } - params.into_iter() + params + .into_iter() .filter_map(|(label, (count, param))| { let lookup = param.pat()?.syntax().text().to_string(); - if count < 2 { None } else { Some((label, lookup)) } + if count < 2 { + None + } else { + Some((label, lookup)) + } }) .for_each(|(label, lookup)| { acc.push(CompletionItem { - label, lookup: Some(lookup), snippet: None + label, + lookup: Some(lookup), + snippet: None, }) }); - fn process<'a, N: ast::FnDefOwner<'a>>(node: N, params: &mut FxHashMap)>) { + fn process<'a, N: ast::FnDefOwner<'a>>( + node: N, + params: &mut FxHashMap)>, + ) { node.functions() .filter_map(|it| it.param_list()) .flat_map(|it| it.params()) .for_each(|param| { let text = param.syntax().text().to_string(); - params.entry(text) - .or_insert((0, param)) - .0 += 1; + params.entry(text).or_insert((0, param)).0 += 1; }) } } @@ -134,8 +150,12 @@ fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool { } } - -fn complete_expr_keywords(file: &File, fn_def: ast::FnDef, name_ref: ast::NameRef, acc: &mut Vec) { +fn complete_expr_keywords( + file: &File, + fn_def: ast::FnDef, + name_ref: ast::NameRef, + acc: &mut Vec, +) { acc.push(keyword("if", "if $0 {}")); acc.push(keyword("match", "match $0 {}")); acc.push(keyword("while", "while $0 {}")); @@ -186,9 +206,14 @@ fn complete_return(fn_def: 
ast::FnDef, name_ref: ast::NameRef) -> Option false, - Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range() + Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(), }; let snip = match (is_stmt, fn_def.ret_type().is_some()) { (true, true) => "return $0;", @@ -209,39 +234,37 @@ fn keyword(kw: &str, snip: &str) -> CompletionItem { fn complete_expr_snippets(acc: &mut Vec) { acc.push(CompletionItem { - label: "pd".to_string(), - lookup: None, - snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()), - } - ); + label: "pd".to_string(), + lookup: None, + snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()), + }); acc.push(CompletionItem { - label: "ppd".to_string(), - lookup: None, - snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()), - } - ); + label: "ppd".to_string(), + lookup: None, + snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()), + }); } fn complete_mod_item_snippets(acc: &mut Vec) { acc.push(CompletionItem { - label: "tfn".to_string(), - lookup: None, - snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()), - } - ); + label: "tfn".to_string(), + lookup: None, + snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()), + }); } fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec) { let mut shadowed = FxHashSet::default(); acc.extend( - scopes.scope_chain(name_ref.syntax()) + scopes + .scope_chain(name_ref.syntax()) .flat_map(|scope| scopes.entries(scope).iter()) .filter(|entry| shadowed.insert(entry.name())) .map(|entry| CompletionItem { label: entry.name().to_string(), lookup: None, snippet: None, - }) + }), ); if scopes.self_param.is_some() { acc.push(CompletionItem { @@ -281,20 +304,24 @@ mod tests { #[test] fn test_completion_let_scope() { - check_scope_completion(r" + check_scope_completion( + r" fn quux(x: i32) { let y = 92; 1 + <|>; let z = (); } - ", r#"[CompletionItem { label: "y", lookup: None, snippet: None }, + ", + r#"[CompletionItem { label: "y", lookup: None, snippet: None }, CompletionItem { label: "x", lookup: None, snippet: None }, - CompletionItem { label: "quux", lookup: None, snippet: None }]"#); + CompletionItem { label: "quux", lookup: None, snippet: None }]"#, + ); } #[test] fn test_completion_if_let_scope() { - check_scope_completion(r" + check_scope_completion( + r" fn quux() { if let Some(x) = foo() { let y = 92; @@ -304,67 +331,85 @@ mod tests { 1 + <|> } } - ", r#"[CompletionItem { label: "b", lookup: None, snippet: None }, + ", + r#"[CompletionItem { label: "b", lookup: None, snippet: None }, CompletionItem { label: "a", lookup: None, snippet: None }, - CompletionItem { label: "quux", lookup: None, snippet: None }]"#); + CompletionItem { label: "quux", lookup: None, snippet: None }]"#, + ); } #[test] fn test_completion_for_scope() { - check_scope_completion(r" + check_scope_completion( + r" fn quux() { for x in &[1, 2, 3] { <|> } } - ", r#"[CompletionItem { label: "x", lookup: None, snippet: None }, - CompletionItem { label: "quux", lookup: None, snippet: None }]"#); + ", + r#"[CompletionItem { label: "x", lookup: None, snippet: None }, + CompletionItem { label: "quux", lookup: None, snippet: None }]"#, + ); } #[test] fn test_completion_mod_scope() { - check_scope_completion(r" + check_scope_completion( + r" struct Foo; enum Baz {} fn quux() { <|> } - ", r#"[CompletionItem { label: "Foo", lookup: None, snippet: None }, + ", + r#"[CompletionItem { label: "Foo", lookup: None, snippet: None }, CompletionItem { label: "Baz", lookup: None, snippet: None }, - 
CompletionItem { label: "quux", lookup: None, snippet: None }]"#); + CompletionItem { label: "quux", lookup: None, snippet: None }]"#, + ); } #[test] fn test_completion_mod_scope_no_self_use() { - check_scope_completion(r" + check_scope_completion( + r" use foo<|>; - ", r#"[]"#); + ", + r#"[]"#, + ); } #[test] fn test_completion_mod_scope_nested() { - check_scope_completion(r" + check_scope_completion( + r" struct Foo; mod m { struct Bar; fn quux() { <|> } } - ", r#"[CompletionItem { label: "Bar", lookup: None, snippet: None }, - CompletionItem { label: "quux", lookup: None, snippet: None }]"#); + ", + r#"[CompletionItem { label: "Bar", lookup: None, snippet: None }, + CompletionItem { label: "quux", lookup: None, snippet: None }]"#, + ); } #[test] fn test_complete_type() { - check_scope_completion(r" + check_scope_completion( + r" struct Foo; fn x() -> <|> - ", r#"[CompletionItem { label: "Foo", lookup: None, snippet: None }, - CompletionItem { label: "x", lookup: None, snippet: None }]"#) + ", + r#"[CompletionItem { label: "Foo", lookup: None, snippet: None }, + CompletionItem { label: "x", lookup: None, snippet: None }]"#, + ) } #[test] fn test_complete_shadowing() { - check_scope_completion(r" + check_scope_completion( + r" fn foo() -> { let bar = 92; { @@ -372,15 +417,20 @@ mod tests { <|> } } - ", r#"[CompletionItem { label: "bar", lookup: None, snippet: None }, - CompletionItem { label: "foo", lookup: None, snippet: None }]"#) + ", + r#"[CompletionItem { label: "bar", lookup: None, snippet: None }, + CompletionItem { label: "foo", lookup: None, snippet: None }]"#, + ) } #[test] fn test_complete_self() { - check_scope_completion(r" + check_scope_completion( + r" impl S { fn foo(&self) { <|> } } - ", r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#) + ", + r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#, + ) } #[test] diff --git a/crates/ra_editor/src/edit.rs b/crates/ra_editor/src/edit.rs index 46e687319..c3149ec54 100644 --- a/crates/ra_editor/src/edit.rs +++ b/crates/ra_editor/src/edit.rs @@ -1,8 +1,5 @@ use crate::{TextRange, TextUnit}; -use ra_syntax::{ - AtomEdit, - text_utils::contains_offset_nonstrict, -}; +use ra_syntax::{text_utils::contains_offset_nonstrict, AtomEdit}; #[derive(Debug, Clone)] pub struct Edit { @@ -11,7 +8,7 @@ pub struct Edit { #[derive(Debug)] pub struct EditBuilder { - atoms: Vec + atoms: Vec, } impl EditBuilder { @@ -36,7 +33,9 @@ impl EditBuilder { Edit { atoms } } pub fn invalidates_offset(&self, offset: TextUnit) -> bool { - self.atoms.iter().any(|atom| contains_offset_nonstrict(atom.delete, offset)) + self.atoms + .iter() + .any(|atom| contains_offset_nonstrict(atom.delete, offset)) } } @@ -74,7 +73,7 @@ impl Edit { break; } if offset < atom.delete.end() { - return None + return None; } res += TextUnit::of_str(&atom.insert); res -= atom.delete.len(); diff --git a/crates/ra_editor/src/extend_selection.rs b/crates/ra_editor/src/extend_selection.rs index ab03a717e..9ee1df281 100644 --- a/crates/ra_editor/src/extend_selection.rs +++ b/crates/ra_editor/src/extend_selection.rs @@ -1,7 +1,8 @@ use ra_syntax::{ - File, TextRange, SyntaxNodeRef, TextUnit, Direction, + algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset}, + Direction, File, SyntaxKind::*, - algo::{find_leaf_at_offset, LeafAtOffset, find_covering_node}, + SyntaxNodeRef, TextRange, TextUnit, }; pub fn extend_selection(file: &File, range: TextRange) -> Option { @@ -20,11 +21,11 @@ pub(crate) fn extend(root: SyntaxNodeRef, range: TextRange) -> 
Option LeafAtOffset::None => return None, LeafAtOffset::Single(l) => { if l.kind() == COMMENT { - extend_single_word_in_comment(l, offset).unwrap_or_else(||l.range()) + extend_single_word_in_comment(l, offset).unwrap_or_else(|| l.range()) } else { l.range() } - }, + } LeafAtOffset::Between(l, r) => pick_best(l, r).range(), }; return Some(leaf_range); @@ -66,7 +67,7 @@ fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRa if let Some(node) = ws.next_sibling() { let start = match ws_prefix.rfind('\n') { Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32), - None => node.range().start() + None => node.range().start(), }; let end = if root.text().char_at(node.range().end()) == Some('\n') { node.range().end() + TextUnit::of_char('\n') @@ -94,10 +95,7 @@ fn extend_comments(node: SyntaxNodeRef) -> Option { let prev = adj_comments(node, Direction::Prev); let next = adj_comments(node, Direction::Next); if prev != next { - Some(TextRange::from_to( - prev.range().start(), - next.range().end(), - )) + Some(TextRange::from_to(prev.range().start(), next.range().end())) } else { None } @@ -109,7 +107,7 @@ fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef { match node.kind() { COMMENT => res = node, WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (), - _ => break + _ => break, } } res @@ -125,8 +123,7 @@ mod tests { let file = File::parse(&before); let mut range = TextRange::offset_len(cursor, 0.into()); for &after in afters { - range = extend_selection(&file, range) - .unwrap(); + range = extend_selection(&file, range).unwrap(); let actual = &before[range]; assert_eq!(after, actual); } @@ -134,10 +131,7 @@ mod tests { #[test] fn test_extend_selection_arith() { - do_check( - r#"fn foo() { <|>1 + 1 }"#, - &["1", "1 + 1", "{ 1 + 1 }"], - ); + do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); } #[test] @@ -149,7 +143,7 @@ impl S { } }"#, - &[" fn foo() {\n\n }\n"] + &[" fn foo() {\n\n }\n"], ); } @@ -165,7 +159,11 @@ struct B { <|> } "#, - &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"] + &[ + "\n \n", + "{\n \n}", + "/// bla\n/// bla\nstruct B {\n \n}", + ], ) } @@ -181,7 +179,7 @@ fn bar(){} // fn foo(){} "#, - &["// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"] + &["// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], ); } @@ -191,42 +189,34 @@ fn bar(){} r#" fn main() { foo<|>+bar;} "#, - &["foo", "foo+bar"] + &["foo", "foo+bar"], ); do_check( r#" fn main() { foo+<|>bar;} "#, - &["bar", "foo+bar"] + &["bar", "foo+bar"], ); } #[test] fn test_extend_selection_prefer_lifetimes() { - do_check( - r#"fn foo<<|>'a>() {}"#, - &["'a", "<'a>"] - ); - do_check( - r#"fn foo<'a<|>>() {}"#, - &["'a", "<'a>"] - ); + do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); + do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); } #[test] fn test_extend_selection_select_first_word() { + do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); do_check( - r#"// foo bar b<|>az quxx"#, - &["baz", "// foo bar baz quxx"] - ); - do_check(r#" + r#" impl S { fn foo() { // hel<|>lo world } } "#, - &["hello", "// hello world"] + &["hello", "// hello world"], ); } } diff --git a/crates/ra_editor/src/folding_ranges.rs b/crates/ra_editor/src/folding_ranges.rs index a1699d449..e5bc0c4ee 100644 --- a/crates/ra_editor/src/folding_ranges.rs +++ b/crates/ra_editor/src/folding_ranges.rs @@ -1,11 +1,9 @@ use rustc_hash::FxHashSet; use ra_syntax::{ - ast, - AstNode, - File, TextRange, SyntaxNodeRef, + ast, AstNode, Direction, File, 
SyntaxKind::{self, *}, - Direction, + SyntaxNodeRef, TextRange, }; #[derive(Debug, PartialEq, Eq)] @@ -28,7 +26,10 @@ pub fn folding_ranges(file: &File) -> Vec { // Fold items that span multiple lines if let Some(kind) = fold_kind(node.kind()) { if has_newline(node) { - res.push(Fold { range: node.range(), kind }); + res.push(Fold { + range: node.range(), + kind, + }); } } @@ -37,8 +38,12 @@ pub fn folding_ranges(file: &File) -> Vec { continue; } if node.kind() == COMMENT { - contiguous_range_for_comment(node, &mut visited_comments) - .map(|range| res.push(Fold { range, kind: FoldKind::Comment })); + contiguous_range_for_comment(node, &mut visited_comments).map(|range| { + res.push(Fold { + range, + kind: FoldKind::Comment, + }) + }); } } @@ -49,13 +54,11 @@ fn fold_kind(kind: SyntaxKind) -> Option { match kind { COMMENT => Some(FoldKind::Comment), USE_ITEM => Some(FoldKind::Imports), - _ => None + _ => None, } } -fn has_newline( - node: SyntaxNodeRef, -) -> bool { +fn has_newline(node: SyntaxNodeRef) -> bool { for descendant in node.descendants() { if let Some(ws) = ast::Whitespace::cast(descendant) { if ws.has_newlines() { @@ -100,9 +103,7 @@ fn contiguous_range_for_comment<'a>( // The comment group ends because either: // * An element of a different kind was reached // * A comment of a different flavor was reached - _ => { - break - } + _ => break, } } @@ -128,7 +129,11 @@ mod tests { let folds = folding_ranges(&file); assert_eq!(folds.len(), ranges.len()); - for ((fold, range), fold_kind) in folds.into_iter().zip(ranges.into_iter()).zip(fold_kinds.into_iter()) { + for ((fold, range), fold_kind) in folds + .into_iter() + .zip(ranges.into_iter()) + .zip(fold_kinds.into_iter()) + { assert_eq!(fold.range.start(), range.start()); assert_eq!(fold.range.end(), range.end()); assert_eq!(&fold.kind, fold_kind); @@ -181,5 +186,4 @@ fn main() { do_check(text, folds); } - } diff --git a/crates/ra_editor/src/lib.rs b/crates/ra_editor/src/lib.rs index bd61fd191..417080d90 100644 --- a/crates/ra_editor/src/lib.rs +++ b/crates/ra_editor/src/lib.rs @@ -1,44 +1,41 @@ -extern crate ra_syntax; -extern crate superslice; extern crate itertools; extern crate join_to_string; +extern crate ra_syntax; extern crate rustc_hash; +extern crate superslice; #[cfg(test)] #[macro_use] extern crate test_utils as _test_utils; -mod extend_selection; -mod symbols; -mod line_index; -mod edit; -mod folding_ranges; mod code_actions; -mod typing; mod completion; +mod edit; +mod extend_selection; +mod folding_ranges; +mod line_index; mod scope; +mod symbols; #[cfg(test)] mod test_utils; +mod typing; +pub use self::{ + code_actions::{add_derive, add_impl, flip_comma, introduce_variable, LocalEdit}, + completion::{scope_completion, CompletionItem}, + edit::{Edit, EditBuilder}, + extend_selection::extend_selection, + folding_ranges::{folding_ranges, Fold, FoldKind}, + line_index::{LineCol, LineIndex}, + symbols::{file_structure, file_symbols, FileSymbol, StructureNode}, + typing::{join_lines, on_enter, on_eq_typed}, +}; +pub use ra_syntax::AtomEdit; use ra_syntax::{ - File, TextUnit, TextRange, SmolStr, SyntaxNodeRef, - ast::{self, AstNode, NameOwner}, algo::find_leaf_at_offset, + ast::{self, AstNode, NameOwner}, + File, SmolStr, SyntaxKind::{self, *}, -}; -pub use ra_syntax::AtomEdit; -pub use self::{ - line_index::{LineIndex, LineCol}, - extend_selection::extend_selection, - symbols::{StructureNode, file_structure, FileSymbol, file_symbols}, - edit::{EditBuilder, Edit}, - code_actions::{ - LocalEdit, - flip_comma, add_derive, 
add_impl, - introduce_variable, - }, - typing::{join_lines, on_eq_typed, on_enter}, - completion::{scope_completion, CompletionItem}, - folding_ranges::{Fold, FoldKind, folding_ranges} + SyntaxNodeRef, TextRange, TextUnit, }; #[derive(Debug)] @@ -67,10 +64,7 @@ pub enum RunnableKind { pub fn matching_brace(file: &File, offset: TextUnit) -> Option { const BRACES: &[SyntaxKind] = &[ - L_CURLY, R_CURLY, - L_BRACK, R_BRACK, - L_PAREN, R_PAREN, - L_ANGLE, R_ANGLE, + L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE, ]; let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset) .filter_map(|node| { @@ -80,7 +74,8 @@ pub fn matching_brace(file: &File, offset: TextUnit) -> Option { .next()?; let parent = brace_node.parent()?; let matching_kind = BRACES[brace_idx ^ 1]; - let matching_node = parent.children() + let matching_node = parent + .children() .find(|node| node.kind() == matching_kind)?; Some(matching_node.range().start()) } @@ -108,10 +103,13 @@ pub fn highlight(file: &File) -> Vec { } pub fn diagnostics(file: &File) -> Vec { - file.errors().into_iter().map(|err| Diagnostic { - range: TextRange::offset_len(err.offset, 1.into()), - msg: "Syntax Error: ".to_string() + &err.msg, - }).collect() + file.errors() + .into_iter() + .map(|err| Diagnostic { + range: TextRange::offset_len(err.offset, 1.into()), + msg: "Syntax Error: ".to_string() + &err.msg, + }) + .collect() } pub fn syntax_tree(file: &File) -> String { @@ -119,7 +117,8 @@ pub fn syntax_tree(file: &File) -> String { } pub fn runnables(file: &File) -> Vec { - file.syntax().descendants() + file.syntax() + .descendants() .filter_map(ast::FnDef::cast) .filter_map(|f| { let name = f.name()?.text(); @@ -127,7 +126,7 @@ pub fn runnables(file: &File) -> Vec { RunnableKind::Bin } else if f.has_atom_attr("test") { RunnableKind::Test { - name: name.to_string() + name: name.to_string(), } } else { return None; @@ -145,15 +144,18 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>( offset: TextUnit, ) -> Option { let leaves = find_leaf_at_offset(syntax, offset); - let leaf = leaves.clone() + let leaf = leaves + .clone() .find(|leaf| !leaf.kind().is_trivia()) .or_else(|| leaves.right_biased())?; - leaf.ancestors() - .filter_map(N::cast) - .next() + leaf.ancestors().filter_map(N::cast).next() } -pub fn resolve_local_name(file: &File, offset: TextUnit, name_ref: ast::NameRef) -> Option<(SmolStr, TextRange)> { +pub fn resolve_local_name( + file: &File, + offset: TextUnit, + name_ref: ast::NameRef, +) -> Option<(SmolStr, TextRange)> { let fn_def = find_node_at_offset::(file.syntax(), offset)?; let scopes = scope::FnScopes::new(fn_def); let scope_entry = scope::resolve_local_name(name_ref, &scopes)?; @@ -164,15 +166,17 @@ pub fn resolve_local_name(file: &File, offset: TextUnit, name_ref: ast::NameRef) #[cfg(test)] mod tests { use super::*; - use crate::test_utils::{assert_eq_dbg, extract_offset, add_cursor}; + use crate::test_utils::{add_cursor, assert_eq_dbg, extract_offset}; #[test] fn test_highlighting() { - let file = File::parse(r#" + let file = File::parse( + r#" // comment fn main() {} println!("Hello, {}!", 92); -"#); +"#, + ); let hls = highlight(&file); assert_eq_dbg( r#"[HighlightedRange { range: [1; 11), tag: "comment" }, @@ -187,7 +191,8 @@ fn main() {} #[test] fn test_runnables() { - let file = File::parse(r#" + let file = File::parse( + r#" fn main() {} #[test] @@ -196,7 +201,8 @@ fn test_foo() {} #[test] #[ignore] fn test_foo() {} -"#); +"#, + ); let runnables = runnables(&file); assert_eq_dbg( 
r#"[Runnable { range: [1; 13), kind: Bin }, @@ -219,9 +225,6 @@ fn test_foo() {} assert_eq_text!(after, &actual); } - do_check( - "struct Foo { a: i32, }<|>", - "struct Foo <|>{ a: i32, }", - ); + do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); } } diff --git a/crates/ra_editor/src/line_index.rs b/crates/ra_editor/src/line_index.rs index 95d64b8a8..da0f2a7f7 100644 --- a/crates/ra_editor/src/line_index.rs +++ b/crates/ra_editor/src/line_index.rs @@ -1,5 +1,5 @@ -use superslice::Ext; use crate::TextUnit; +use superslice::Ext; #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct LineIndex { @@ -29,7 +29,10 @@ impl LineIndex { let line = self.newlines.upper_bound(&offset) - 1; let line_start_offset = self.newlines[line]; let col = offset - line_start_offset; - return LineCol { line: line as u32, col }; + return LineCol { + line: line as u32, + col, + }; } pub fn offset(&self, line_col: LineCol) -> TextUnit { @@ -42,21 +45,105 @@ impl LineIndex { fn test_line_index() { let text = "hello\nworld"; let index = LineIndex::new(text); - assert_eq!(index.line_col(0.into()), LineCol { line: 0, col: 0.into() }); - assert_eq!(index.line_col(1.into()), LineCol { line: 0, col: 1.into() }); - assert_eq!(index.line_col(5.into()), LineCol { line: 0, col: 5.into() }); - assert_eq!(index.line_col(6.into()), LineCol { line: 1, col: 0.into() }); - assert_eq!(index.line_col(7.into()), LineCol { line: 1, col: 1.into() }); - assert_eq!(index.line_col(8.into()), LineCol { line: 1, col: 2.into() }); - assert_eq!(index.line_col(10.into()), LineCol { line: 1, col: 4.into() }); - assert_eq!(index.line_col(11.into()), LineCol { line: 1, col: 5.into() }); - assert_eq!(index.line_col(12.into()), LineCol { line: 1, col: 6.into() }); + assert_eq!( + index.line_col(0.into()), + LineCol { + line: 0, + col: 0.into() + } + ); + assert_eq!( + index.line_col(1.into()), + LineCol { + line: 0, + col: 1.into() + } + ); + assert_eq!( + index.line_col(5.into()), + LineCol { + line: 0, + col: 5.into() + } + ); + assert_eq!( + index.line_col(6.into()), + LineCol { + line: 1, + col: 0.into() + } + ); + assert_eq!( + index.line_col(7.into()), + LineCol { + line: 1, + col: 1.into() + } + ); + assert_eq!( + index.line_col(8.into()), + LineCol { + line: 1, + col: 2.into() + } + ); + assert_eq!( + index.line_col(10.into()), + LineCol { + line: 1, + col: 4.into() + } + ); + assert_eq!( + index.line_col(11.into()), + LineCol { + line: 1, + col: 5.into() + } + ); + assert_eq!( + index.line_col(12.into()), + LineCol { + line: 1, + col: 6.into() + } + ); let text = "\nhello\nworld"; let index = LineIndex::new(text); - assert_eq!(index.line_col(0.into()), LineCol { line: 0, col: 0.into() }); - assert_eq!(index.line_col(1.into()), LineCol { line: 1, col: 0.into() }); - assert_eq!(index.line_col(2.into()), LineCol { line: 1, col: 1.into() }); - assert_eq!(index.line_col(6.into()), LineCol { line: 1, col: 5.into() }); - assert_eq!(index.line_col(7.into()), LineCol { line: 2, col: 0.into() }); + assert_eq!( + index.line_col(0.into()), + LineCol { + line: 0, + col: 0.into() + } + ); + assert_eq!( + index.line_col(1.into()), + LineCol { + line: 1, + col: 0.into() + } + ); + assert_eq!( + index.line_col(2.into()), + LineCol { + line: 1, + col: 1.into() + } + ); + assert_eq!( + index.line_col(6.into()), + LineCol { + line: 1, + col: 5.into() + } + ); + assert_eq!( + index.line_col(7.into()), + LineCol { + line: 2, + col: 0.into() + } + ); } diff --git a/crates/ra_editor/src/scope/fn_scope.rs b/crates/ra_editor/src/scope/fn_scope.rs 
index 99d698b60..9088e5a60 100644 --- a/crates/ra_editor/src/scope/fn_scope.rs +++ b/crates/ra_editor/src/scope/fn_scope.rs @@ -1,10 +1,11 @@ use std::fmt; + use rustc_hash::FxHashMap; use ra_syntax::{ - SyntaxNodeRef, SyntaxNode, SmolStr, AstNode, - ast::{self, NameOwner, LoopBodyOwner, ArgListOwner}, - algo::{generate} + algo::generate, + ast::{self, ArgListOwner, LoopBodyOwner, NameOwner}, + AstNode, SmolStr, SyntaxNode, SyntaxNodeRef, }; type ScopeId = usize; @@ -19,11 +20,12 @@ pub struct FnScopes { impl FnScopes { pub fn new(fn_def: ast::FnDef) -> FnScopes { let mut scopes = FnScopes { - self_param: fn_def.param_list() + self_param: fn_def + .param_list() .and_then(|it| it.self_param()) .map(|it| it.syntax().owned()), scopes: Vec::new(), - scope_for: FxHashMap::default() + scope_for: FxHashMap::default(), }; let root = scopes.root_scope(); scopes.add_params_bindings(root, fn_def.param_list()); @@ -35,27 +37,38 @@ impl FnScopes { pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { &self.scopes[scope].entries } - pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator + 'a { - generate(self.scope_for(node), move |&scope| self.scopes[scope].parent) + pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator + 'a { + generate(self.scope_for(node), move |&scope| { + self.scopes[scope].parent + }) } fn root_scope(&mut self) -> ScopeId { let res = self.scopes.len(); - self.scopes.push(ScopeData { parent: None, entries: vec![] }); + self.scopes.push(ScopeData { + parent: None, + entries: vec![], + }); res } fn new_scope(&mut self, parent: ScopeId) -> ScopeId { let res = self.scopes.len(); - self.scopes.push(ScopeData { parent: Some(parent), entries: vec![] }); + self.scopes.push(ScopeData { + parent: Some(parent), + entries: vec![], + }); res } fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) { - let entries = pat.syntax().descendants() + let entries = pat + .syntax() + .descendants() .filter_map(ast::BindPat::cast) .filter_map(ScopeEntry::new); self.scopes[scope].entries.extend(entries); } fn add_params_bindings(&mut self, scope: ScopeId, params: Option) { - params.into_iter() + params + .into_iter() .flat_map(|it| it.params()) .filter_map(|it| it.pat()) .for_each(|it| self.add_bindings(scope, it)); @@ -71,34 +84,33 @@ impl FnScopes { } pub struct ScopeEntry { - syntax: SyntaxNode + syntax: SyntaxNode, } impl ScopeEntry { fn new(pat: ast::BindPat) -> Option { if pat.name().is_some() { - Some(ScopeEntry { syntax: pat.syntax().owned() }) + Some(ScopeEntry { + syntax: pat.syntax().owned(), + }) } else { None } } pub fn name(&self) -> SmolStr { - self.ast().name() - .unwrap() - .text() + self.ast().name().unwrap().text() } pub fn ast(&self) -> ast::BindPat { - ast::BindPat::cast(self.syntax.borrowed()) - .unwrap() + ast::BindPat::cast(self.syntax.borrowed()).unwrap() } } impl fmt::Debug for ScopeEntry { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("ScopeEntry") - .field("name", &self.name()) - .field("syntax", &self.syntax) - .finish() + .field("name", &self.name()) + .field("syntax", &self.syntax) + .finish() } } @@ -132,16 +144,16 @@ fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: Sco fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { match expr { ast::Expr::IfExpr(e) => { - let cond_scope = e.condition().and_then(|cond| { - compute_cond_scopes(cond, scopes, scope) - }); + let cond_scope = e + .condition() + .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); 
if let Some(block) = e.then_branch() { compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); } if let Some(block) = e.else_branch() { compute_block_scopes(block, scopes, scope); } - }, + } ast::Expr::BlockExpr(e) => { if let Some(block) = e.block() { compute_block_scopes(block, scopes, scope); @@ -153,9 +165,9 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { } } ast::Expr::WhileExpr(e) => { - let cond_scope = e.condition().and_then(|cond| { - compute_cond_scopes(cond, scopes, scope) - }); + let cond_scope = e + .condition() + .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); if let Some(block) = e.loop_body() { compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); } @@ -201,25 +213,31 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { } } } - _ => { - expr.syntax().children() - .filter_map(ast::Expr::cast) - .for_each(|expr| compute_expr_scopes(expr, scopes, scope)) - } + _ => expr + .syntax() + .children() + .filter_map(ast::Expr::cast) + .for_each(|expr| compute_expr_scopes(expr, scopes, scope)), }; fn compute_call_scopes( receiver: Option, arg_list: Option, - scopes: &mut FnScopes, scope: ScopeId, + scopes: &mut FnScopes, + scope: ScopeId, ) { - arg_list.into_iter() + arg_list + .into_iter() .flat_map(|it| it.args()) .chain(receiver) .for_each(|expr| compute_expr_scopes(expr, scopes, scope)); } - fn compute_cond_scopes(cond: ast::Condition, scopes: &mut FnScopes, scope: ScopeId) -> Option { + fn compute_cond_scopes( + cond: ast::Condition, + scopes: &mut FnScopes, + scope: ScopeId, + ) -> Option { if let Some(expr) = cond.expr() { compute_expr_scopes(expr, scopes, scope); } @@ -236,14 +254,18 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { #[derive(Debug)] struct ScopeData { parent: Option, - entries: Vec + entries: Vec, } -pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> Option<&'a ScopeEntry> { +pub fn resolve_local_name<'a>( + name_ref: ast::NameRef, + scopes: &'a FnScopes, +) -> Option<&'a ScopeEntry> { use rustc_hash::FxHashSet; let mut shadowed = FxHashSet::default(); - let ret = scopes.scope_chain(name_ref.syntax()) + let ret = scopes + .scope_chain(name_ref.syntax()) .flat_map(|scope| scopes.entries(scope).iter()) .filter(|entry| shadowed.insert(entry.name())) .filter(|entry| entry.name() == name_ref.text()) @@ -255,8 +277,8 @@ pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> O #[cfg(test)] mod tests { use super::*; - use ra_syntax::File; use crate::{find_node_at_offset, test_utils::extract_offset}; + use ra_syntax::File; fn do_check(code: &str, expected: &[&str]) { let (off, code) = extract_offset(code); @@ -272,7 +294,8 @@ mod tests { let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); let scopes = FnScopes::new(fn_def); - let actual = scopes.scope_chain(marker.syntax()) + let actual = scopes + .scope_chain(marker.syntax()) .flat_map(|scope| scopes.entries(scope)) .map(|it| it.name()) .collect::>(); @@ -281,7 +304,8 @@ mod tests { #[test] fn test_lambda_scope() { - do_check(r" + do_check( + r" fn quux(foo: i32) { let f = |bar, baz: i32| { <|> @@ -293,7 +317,8 @@ mod tests { #[test] fn test_call_scope() { - do_check(r" + do_check( + r" fn quux() { f(|x| <|> ); }", @@ -303,7 +328,8 @@ mod tests { #[test] fn test_metod_call_scope() { - do_check(r" + do_check( + r" fn quux() { z.f(|x| 
<|> ); }", @@ -313,7 +339,8 @@ mod tests { #[test] fn test_loop_scope() { - do_check(r" + do_check( + r" fn quux() { loop { let x = (); @@ -326,7 +353,8 @@ mod tests { #[test] fn test_match() { - do_check(r" + do_check( + r" fn quux() { match () { Some(x) => { @@ -340,7 +368,8 @@ mod tests { #[test] fn test_shadow_variable() { - do_check(r" + do_check( + r" fn foo(x: String) { let x : &str = &x<|>; }", @@ -356,14 +385,20 @@ mod tests { let scopes = FnScopes::new(fn_def); - let local_name = resolve_local_name(name_ref, &scopes).unwrap().ast().name().unwrap(); - let expected_name = find_node_at_offset::(file.syntax(), expected_offset.into()).unwrap(); + let local_name = resolve_local_name(name_ref, &scopes) + .unwrap() + .ast() + .name() + .unwrap(); + let expected_name = + find_node_at_offset::(file.syntax(), expected_offset.into()).unwrap(); assert_eq!(local_name.syntax().range(), expected_name.syntax().range()); } #[test] fn test_resolve_local_name() { - do_check_local_name(r#" + do_check_local_name( + r#" fn foo(x: i32, y: u32) { { let z = x * 2; @@ -372,25 +407,30 @@ mod tests { let t = x<|> * 3; } }"#, - 21); + 21, + ); } #[test] fn test_resolve_local_name_declaration() { - do_check_local_name(r#" + do_check_local_name( + r#" fn foo(x: String) { let x : &str = &x<|>; }"#, - 21); + 21, + ); } #[test] fn test_resolve_local_name_shadow() { - do_check_local_name(r" + do_check_local_name( + r" fn foo(x: String) { let x : &str = &x; x<|> }", - 46); + 46, + ); } } diff --git a/crates/ra_editor/src/scope/mod.rs b/crates/ra_editor/src/scope/mod.rs index 7d6d530f7..cc2d49392 100644 --- a/crates/ra_editor/src/scope/mod.rs +++ b/crates/ra_editor/src/scope/mod.rs @@ -2,7 +2,6 @@ mod fn_scope; mod mod_scope; pub use self::{ - fn_scope::{FnScopes, resolve_local_name}, + fn_scope::{resolve_local_name, FnScopes}, mod_scope::ModuleScope, }; - diff --git a/crates/ra_editor/src/scope/mod_scope.rs b/crates/ra_editor/src/scope/mod_scope.rs index d2a3e7c58..8d7e408f8 100644 --- a/crates/ra_editor/src/scope/mod_scope.rs +++ b/crates/ra_editor/src/scope/mod_scope.rs @@ -1,6 +1,6 @@ use ra_syntax::{ - AstNode, SyntaxNode, SyntaxNodeRef, SmolStr, ast::{self, AstChildren}, + AstNode, SmolStr, SyntaxNode, SyntaxNodeRef, }; pub struct ModuleScope { @@ -13,7 +13,8 @@ pub struct Entry { } enum EntryKind { - Item, Import, + Item, + Import, } impl ModuleScope { @@ -34,9 +35,8 @@ impl ModuleScope { collect_imports(tree, &mut entries); } continue; - }, - ast::ModuleItem::ExternCrateItem(_) | - ast::ModuleItem::ImplItem(_) => continue, + } + ast::ModuleItem::ExternCrateItem(_) | ast::ModuleItem::ImplItem(_) => continue, }; entries.extend(entry) } @@ -52,20 +52,22 @@ impl ModuleScope { impl Entry { fn new<'a>(item: impl ast::NameOwner<'a>) -> Option { let name = item.name()?; - Some(Entry { node: name.syntax().owned(), kind: EntryKind::Item }) + Some(Entry { + node: name.syntax().owned(), + kind: EntryKind::Item, + }) } fn new_import(path: ast::Path) -> Option { let name_ref = path.segment()?.name_ref()?; - Some(Entry { node: name_ref.syntax().owned(), kind: EntryKind::Import }) + Some(Entry { + node: name_ref.syntax().owned(), + kind: EntryKind::Import, + }) } pub fn name(&self) -> SmolStr { match self.kind { - EntryKind::Item => - ast::Name::cast(self.node.borrowed()).unwrap() - .text(), - EntryKind::Import => - ast::NameRef::cast(self.node.borrowed()).unwrap() - .text(), + EntryKind::Item => ast::Name::cast(self.node.borrowed()).unwrap().text(), + EntryKind::Import => 
ast::NameRef::cast(self.node.borrowed()).unwrap().text(), } } pub fn syntax(&self) -> SyntaxNodeRef { @@ -75,32 +77,31 @@ impl Entry { fn collect_imports(tree: ast::UseTree, acc: &mut Vec) { if let Some(use_tree_list) = tree.use_tree_list() { - return use_tree_list.use_trees().for_each(|it| collect_imports(it, acc)); + return use_tree_list + .use_trees() + .for_each(|it| collect_imports(it, acc)); } if let Some(path) = tree.path() { acc.extend(Entry::new_import(path)); } } - #[cfg(test)] mod tests { use super::*; - use ra_syntax::{File, ast::ModuleItemOwner}; + use ra_syntax::{ast::ModuleItemOwner, File}; fn do_check(code: &str, expected: &[&str]) { let file = File::parse(&code); let scope = ModuleScope::new(file.ast().items()); - let actual = scope.entries - .iter() - .map(|it| it.name()) - .collect::>(); + let actual = scope.entries.iter().map(|it| it.name()).collect::>(); assert_eq!(expected, actual.as_slice()); } #[test] fn test_module_scope() { - do_check(" + do_check( + " struct Foo; enum Bar {} mod baz {} @@ -110,6 +111,8 @@ mod tests { t, }; type T = (); - ", &["Foo", "Bar", "baz", "quux", "z", "t", "T"]) + ", + &["Foo", "Bar", "baz", "quux", "z", "t", "T"], + ) } } diff --git a/crates/ra_editor/src/symbols.rs b/crates/ra_editor/src/symbols.rs index d9e4b2df7..b768b34bc 100644 --- a/crates/ra_editor/src/symbols.rs +++ b/crates/ra_editor/src/symbols.rs @@ -1,12 +1,13 @@ +use crate::TextRange; + use ra_syntax::{ - SyntaxKind, SyntaxNodeRef, AstNode, File, SmolStr, - ast::{self, NameOwner}, algo::{ visit::{visitor, Visitor}, walk::{walk, WalkEvent}, }, + ast::{self, NameOwner}, + AstNode, File, SmolStr, SyntaxKind, SyntaxNodeRef, }; -use crate::TextRange; #[derive(Debug, Clone)] pub struct StructureNode { @@ -25,9 +26,7 @@ pub struct FileSymbol { } pub fn file_symbols(file: &File) -> Vec { - file.syntax().descendants() - .filter_map(to_symbol) - .collect() + file.syntax().descendants().filter_map(to_symbol).collect() } fn to_symbol(node: SyntaxNodeRef) -> Option { @@ -51,23 +50,20 @@ fn to_symbol(node: SyntaxNodeRef) -> Option { .accept(node)? 
} - pub fn file_structure(file: &File) -> Vec { let mut res = Vec::new(); let mut stack = Vec::new(); for event in walk(file.syntax()) { match event { - WalkEvent::Enter(node) => { - match structure_node(node) { - Some(mut symbol) => { - symbol.parent = stack.last().map(|&n| n); - stack.push(res.len()); - res.push(symbol); - } - None => (), + WalkEvent::Enter(node) => match structure_node(node) { + Some(mut symbol) => { + symbol.parent = stack.last().map(|&n| n); + stack.push(res.len()); + res.push(symbol); } - } + None => (), + }, WalkEvent::Exit(node) => { if structure_node(node).is_some() { stack.pop().unwrap(); @@ -131,7 +127,8 @@ mod tests { #[test] fn test_file_structure() { - let file = File::parse(r#" + let file = File::parse( + r#" struct Foo { x: i32 } @@ -148,7 +145,8 @@ const C: i32 = 92; impl E {} impl fmt::Debug for E {} -"#); +"#, + ); let symbols = file_structure(&file); assert_eq_dbg( r#"[StructureNode { parent: None, label: "Foo", navigation_range: [8; 11), node_range: [1; 26), kind: STRUCT_DEF }, diff --git a/crates/ra_editor/src/test_utils.rs b/crates/ra_editor/src/test_utils.rs index 49eb530d5..bc3d700f6 100644 --- a/crates/ra_editor/src/test_utils.rs +++ b/crates/ra_editor/src/test_utils.rs @@ -1,12 +1,8 @@ -use ra_syntax::{File, TextUnit, TextRange}; -pub use crate::_test_utils::*; use crate::LocalEdit; +pub use crate::_test_utils::*; +use ra_syntax::{File, TextRange, TextUnit}; -pub fn check_action Option> ( - before: &str, - after: &str, - f: F, -) { +pub fn check_action Option>(before: &str, after: &str, f: F) { let (before_cursor_pos, before) = extract_offset(before); let file = File::parse(&before); let result = f(&file, before_cursor_pos).expect("code action is not applicable"); @@ -19,7 +15,7 @@ pub fn check_action Option> ( assert_eq_text!(after, &actual); } -pub fn check_action_range Option> ( +pub fn check_action_range Option>( before: &str, after: &str, f: F, diff --git a/crates/ra_editor/src/typing.rs b/crates/ra_editor/src/typing.rs index 542b9e10b..50b52e7a1 100644 --- a/crates/ra_editor/src/typing.rs +++ b/crates/ra_editor/src/typing.rs @@ -1,32 +1,30 @@ use std::mem; use ra_syntax::{ - TextUnit, TextRange, SyntaxNodeRef, File, AstNode, SyntaxKind, + algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset}, ast, - algo::{ - find_covering_node, find_leaf_at_offset, LeafAtOffset, - }, - text_utils::{intersect, contains_offset_nonstrict}, + text_utils::{contains_offset_nonstrict, intersect}, + AstNode, File, SyntaxKind, SyntaxKind::*, + SyntaxNodeRef, TextRange, TextUnit, }; -use crate::{LocalEdit, EditBuilder, find_node_at_offset}; +use crate::{find_node_at_offset, EditBuilder, LocalEdit}; pub fn join_lines(file: &File, range: TextRange) -> LocalEdit { let range = if range.is_empty() { let syntax = file.syntax(); let text = syntax.text().slice(range.start()..); let pos = match text.find('\n') { - None => return LocalEdit { - edit: EditBuilder::new().finish(), - cursor_position: None - }, - Some(pos) => pos + None => { + return LocalEdit { + edit: EditBuilder::new().finish(), + cursor_position: None, + } + } + Some(pos) => pos, }; - TextRange::offset_len( - range.start() + pos, - TextUnit::of_char('\n'), - ) + TextRange::offset_len(range.start() + pos, TextUnit::of_char('\n')) } else { range }; @@ -58,7 +56,9 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit { } pub fn on_enter(file: &File, offset: TextUnit) -> Option { - let comment = find_leaf_at_offset(file.syntax(), offset).left_biased().and_then(|it| ast::Comment::cast(it))?; + 
let comment = find_leaf_at_offset(file.syntax(), offset) + .left_biased() + .and_then(|it| ast::Comment::cast(it))?; if let ast::CommentFlavor::Multiline = comment.flavor() { return None; @@ -88,7 +88,7 @@ fn node_indent<'a>(file: &'a File, node: SyntaxNodeRef) -> Option<&'a str> { } LeafAtOffset::Single(n) => { assert!(n == node); - return Some("") + return Some(""); } LeafAtOffset::None => unreachable!(), }; @@ -110,7 +110,12 @@ pub fn on_eq_typed(file: &File, offset: TextUnit) -> Option { if contains_offset_nonstrict(expr_range, offset) && offset != expr_range.start() { return None; } - if file.syntax().text().slice(offset..expr_range.start()).contains('\n') { + if file + .syntax() + .text() + .slice(offset..expr_range.start()) + .contains('\n') + { return None; } } else { @@ -125,12 +130,7 @@ pub fn on_eq_typed(file: &File, offset: TextUnit) -> Option { }) } -fn remove_newline( - edit: &mut EditBuilder, - node: SyntaxNodeRef, - node_text: &str, - offset: TextUnit, -) { +fn remove_newline(edit: &mut EditBuilder, node: SyntaxNodeRef, node_text: &str, offset: TextUnit) { if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 { // The node is either the first or the last in the file let suff = &node_text[TextRange::from_to( @@ -156,7 +156,7 @@ fn remove_newline( // // into `my_function()` if join_single_expr_block(edit, node).is_some() { - return + return; } // The node is between two other nodes @@ -170,34 +170,28 @@ fn remove_newline( // Adds: a single whitespace edit.replace( TextRange::from_to(prev.range().start(), node.range().end()), - " ".to_string() + " ".to_string(), ); } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) { // Removes: newline (incl. surrounding whitespace), start of the next comment edit.delete(TextRange::from_to( node.range().start(), - next.syntax().range().start() + TextUnit::of_str(next.prefix()) + next.syntax().range().start() + TextUnit::of_str(next.prefix()), )); } else { // Remove newline but add a computed amount of whitespace characters - edit.replace( - node.range(), - compute_ws(prev, next).to_string(), - ); + edit.replace(node.range(), compute_ws(prev, next).to_string()); } } fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { match (left, right) { - (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, - _ => false + (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, + _ => false, } } -fn join_single_expr_block( - edit: &mut EditBuilder, - node: SyntaxNodeRef, -) -> Option<()> { +fn join_single_expr_block(edit: &mut EditBuilder, node: SyntaxNodeRef) -> Option<()> { let block = ast::Block::cast(node.parent()?)?; let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; let expr = single_expr(block)?; @@ -244,7 +238,7 @@ fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str { #[cfg(test)] mod tests { use super::*; - use crate::test_utils::{check_action, extract_range, extract_offset, add_cursor}; + use crate::test_utils::{add_cursor, check_action, extract_offset, extract_range}; fn check_join_lines(before: &str, after: &str) { check_action(before, after, |file, offset| { @@ -256,118 +250,142 @@ mod tests { #[test] fn test_join_lines_comma() { - check_join_lines(r" + check_join_lines( + r" fn foo() { <|>foo(1, ) } -", r" +", + r" fn foo() { <|>foo(1) } -"); +", + ); } #[test] fn test_join_lines_lambda_block() { - check_join_lines(r" + check_join_lines( + r" pub fn reparse(&self, edit: &AtomEdit) -> File { <|>self.incremental_reparse(edit).unwrap_or_else(|| 
{ self.full_reparse(edit) }) } -", r" +", + r" pub fn reparse(&self, edit: &AtomEdit) -> File { <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) } -"); +", + ); } #[test] fn test_join_lines_block() { - check_join_lines(r" + check_join_lines( + r" fn foo() { foo(<|>{ 92 }) -}", r" +}", + r" fn foo() { foo(<|>92) -}"); +}", + ); } #[test] fn test_join_lines_normal_comments() { - check_join_lines(r" + check_join_lines( + r" fn foo() { // Hello<|> // world! } -", r" +", + r" fn foo() { // Hello<|> world! } -"); +", + ); } #[test] fn test_join_lines_doc_comments() { - check_join_lines(r" + check_join_lines( + r" fn foo() { /// Hello<|> /// world! } -", r" +", + r" fn foo() { /// Hello<|> world! } -"); +", + ); } #[test] fn test_join_lines_mod_comments() { - check_join_lines(r" + check_join_lines( + r" fn foo() { //! Hello<|> //! world! } -", r" +", + r" fn foo() { //! Hello<|> world! } -"); +", + ); } #[test] fn test_join_lines_multiline_comments_1() { - check_join_lines(r" + check_join_lines( + r" fn foo() { // Hello<|> /* world! */ } -", r" +", + r" fn foo() { // Hello<|> world! */ } -"); +", + ); } #[test] fn test_join_lines_multiline_comments_2() { - check_join_lines(r" + check_join_lines( + r" fn foo() { // The<|> /* quick brown fox! */ } -", r" +", + r" fn foo() { // The<|> quick brown fox! */ } -"); +", + ); } fn check_join_lines_sel(before: &str, after: &str) { @@ -380,59 +398,71 @@ fn foo() { #[test] fn test_join_lines_selection_fn_args() { - check_join_lines_sel(r" + check_join_lines_sel( + r" fn foo() { <|>foo(1, 2, 3, <|>) } - ", r" + ", + r" fn foo() { foo(1, 2, 3) } - "); + ", + ); } #[test] fn test_join_lines_selection_struct() { - check_join_lines_sel(r" + check_join_lines_sel( + r" struct Foo <|>{ f: u32, }<|> - ", r" + ", + r" struct Foo { f: u32 } - "); + ", + ); } #[test] fn test_join_lines_selection_dot_chain() { - check_join_lines_sel(r" + check_join_lines_sel( + r" fn foo() { join(<|>type_params.type_params() .filter_map(|it| it.name()) .map(|it| it.text())<|>) -}", r" +}", + r" fn foo() { join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) -}"); +}", + ); } #[test] fn test_join_lines_selection_lambda_block_body() { - check_join_lines_sel(r" + check_join_lines_sel( + r" pub fn handle_find_matching_brace() { params.offsets .map(|offset| <|>{ world.analysis().matching_brace(&file, offset).unwrap_or(offset) }<|>) .collect(); -}", r" +}", + r" pub fn handle_find_matching_brace() { params.offsets .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) .collect(); -}"); +}", + ); } #[test] @@ -454,15 +484,18 @@ pub fn handle_find_matching_brace() { // let foo =; // } // "); - do_check(r" + do_check( + r" fn foo() { let foo =<|> 1 + 1 } -", r" +", + r" fn foo() { let foo = 1 + 1; } -"); +", + ); // do_check(r" // fn foo() { // let foo =<|> @@ -496,28 +529,34 @@ fn foo() { assert!(apply_on_enter(text).is_none()) } - do_check(r" + do_check( + r" /// Some docs<|> fn foo() { } -", r" +", + r" /// Some docs /// <|> fn foo() { } -"); - do_check(r" +", + ); + do_check( + r" impl S { /// Some<|> docs. fn foo() {} } -", r" +", + r" impl S { /// Some /// <|> docs. fn foo() {} } -"); +", + ); do_check_noop(r"<|>//! docz"); } } -- cgit v1.2.3
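
The commit message notes that generated files are left out of the `cargo fmt` run, but the diff itself does not show how that exclusion is wired up. A minimal sketch of one way to do it — assuming an item-level `#[rustfmt::skip]` attribute and a hypothetical `generated` module, not necessarily the mechanism this commit actually used:

    // Hypothetical module; not part of this patch. The tool attribute tells
    // rustfmt to leave the machine-generated item tree exactly as emitted.
    #[rustfmt::skip]
    mod generated {
        pub const SYNTAX_KINDS: &[&str] = &["IDENT", "WHITESPACE", "COMMENT"];
    }

    fn main() {
        // Hand-written code elsewhere in the crate is still formatted normally
        // by `cargo fmt`.
        println!("{} syntax kinds", generated::SYNTAX_KINDS.len());
    }

Another option, on nightly rustfmt, is listing the generated paths under `ignore` in rustfmt.toml; either way the intent is that `cargo fmt` never rewrites machine-generated sources while the rest of the crate is kept in the standard style shown in the hunks above.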