From 6be50f7d5de3737464853a589673375fc0cafa97 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Wed, 31 Oct 2018 23:41:43 +0300 Subject: Reformat all --- crates/ra_analysis/src/completion.rs | 57 ++++++----- crates/ra_analysis/src/db.rs | 13 +-- crates/ra_analysis/src/descriptors/function/imp.rs | 12 +-- crates/ra_analysis/src/descriptors/function/mod.rs | 29 +++--- .../ra_analysis/src/descriptors/function/scope.rs | 7 +- crates/ra_analysis/src/descriptors/mod.rs | 39 ++++---- crates/ra_analysis/src/descriptors/module/imp.rs | 33 ++++--- crates/ra_analysis/src/descriptors/module/mod.rs | 30 +++--- crates/ra_analysis/src/descriptors/module/scope.rs | 5 +- crates/ra_analysis/src/imp.rs | 77 ++++++++------- crates/ra_analysis/src/input.rs | 9 +- crates/ra_analysis/src/lib.rs | 47 ++++----- crates/ra_analysis/src/mock_analysis.rs | 17 ++-- crates/ra_analysis/src/symbol_index.rs | 5 +- crates/ra_analysis/src/syntax_ptr.rs | 23 +++-- crates/ra_analysis/tests/tests.rs | 105 ++++++++++++++------- crates/ra_editor/src/folding_ranges.rs | 12 ++- crates/ra_editor/src/lib.rs | 2 - crates/ra_editor/src/symbols.rs | 10 +- crates/ra_lsp_server/src/caps.rs | 10 +- crates/ra_lsp_server/src/conv.rs | 3 +- crates/ra_lsp_server/src/lib.rs | 2 +- crates/ra_lsp_server/src/main_loop/handlers.rs | 87 ++++++++--------- crates/ra_lsp_server/src/main_loop/mod.rs | 31 +++--- crates/ra_lsp_server/src/path_map.rs | 3 +- crates/ra_lsp_server/src/req.rs | 4 +- crates/ra_lsp_server/src/server_world.rs | 8 +- crates/ra_syntax/src/algo/mod.rs | 5 +- crates/ra_syntax/src/ast/mod.rs | 16 +++- crates/ra_syntax/src/lib.rs | 4 +- crates/ra_syntax/src/utils.rs | 4 +- crates/test_utils/src/lib.rs | 31 +++--- crates/tools/src/lib.rs | 31 +++--- crates/tools/src/main.rs | 8 +- crates/tools/tests/cli.rs | 9 +- 35 files changed, 422 insertions(+), 366 deletions(-) (limited to 'crates') diff --git a/crates/ra_analysis/src/completion.rs b/crates/ra_analysis/src/completion.rs index 286b6c376..6bd1233f9 100644 --- a/crates/ra_analysis/src/completion.rs +++ b/crates/ra_analysis/src/completion.rs @@ -1,22 +1,22 @@ -use rustc_hash::{FxHashMap, FxHashSet}; -use ra_editor::{find_node_at_offset}; +use ra_editor::find_node_at_offset; use ra_syntax::{ - AtomEdit, File, TextUnit, AstNode, SyntaxNodeRef, algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx}, ast::{self, AstChildren, LoopBodyOwner, ModuleItemOwner}, + AstNode, AtomEdit, File, SyntaxKind::*, + SyntaxNodeRef, TextUnit, }; +use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ - FileId, Cancelable, - input::FilesDatabase, db::{self, SyntaxDatabase}, - descriptors::DescriptorDatabase, descriptors::function::FnScopes, - descriptors::module::{ModuleTree, ModuleId, ModuleScope}, + descriptors::module::{ModuleId, ModuleScope, ModuleTree}, + descriptors::DescriptorDatabase, + input::FilesDatabase, + Cancelable, FileId, }; - #[derive(Debug)] pub struct CompletionItem { /// What user sees in pop-up @@ -27,7 +27,11 @@ pub struct CompletionItem { pub snippet: Option, } -pub(crate) fn resolve_based_completion(db: &db::RootDatabase, file_id: FileId, offset: TextUnit) -> Cancelable>> { +pub(crate) fn resolve_based_completion( + db: &db::RootDatabase, + file_id: FileId, + offset: TextUnit, +) -> Cancelable>> { let source_root_id = db.file_source_root(file_id); let file = db.file_syntax(file_id); let module_tree = db.module_tree(source_root_id)?; @@ -56,9 +60,12 @@ pub(crate) fn resolve_based_completion(db: &db::RootDatabase, file_id: FileId, o Ok(Some(res)) } - - -pub(crate) fn 
find_target_module(module_tree: &ModuleTree, module_id: ModuleId, file: &File, offset: TextUnit) -> Option { +pub(crate) fn find_target_module( + module_tree: &ModuleTree, + module_id: ModuleId, + file: &File, + offset: TextUnit, +) -> Option { let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset)?; let mut crate_path = crate_path(name_ref)?; @@ -71,8 +78,10 @@ pub(crate) fn find_target_module(module_tree: &ModuleTree, module_id: ModuleId, } fn crate_path(name_ref: ast::NameRef) -> Option> { - let mut path = name_ref.syntax() - .parent().and_then(ast::PathSegment::cast)? + let mut path = name_ref + .syntax() + .parent() + .and_then(ast::PathSegment::cast)? .parent_path(); let mut res = Vec::new(); loop { @@ -80,8 +89,7 @@ fn crate_path(name_ref: ast::NameRef) -> Option> { match segment.kind()? { ast::PathSegmentKind::Name(name) => res.push(name), ast::PathSegmentKind::CrateKw => break, - ast::PathSegmentKind::SelfKw | ast::PathSegmentKind::SuperKw => - return None, + ast::PathSegmentKind::SelfKw | ast::PathSegmentKind::SuperKw => return None, } path = path.qualifier()?; } @@ -89,7 +97,6 @@ fn crate_path(name_ref: ast::NameRef) -> Option> { Some(res) } - pub(crate) fn scope_completion( db: &db::RootDatabase, file_id: FileId, @@ -158,11 +165,7 @@ fn complete_module_items( ); } -fn complete_name_ref( - file: &File, - name_ref: ast::NameRef, - acc: &mut Vec, -) { +fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec) { if !is_node::(name_ref.syntax()) { return; } @@ -273,7 +276,11 @@ fn is_in_loop_body(name_ref: ast::NameRef) -> bool { .visit::(LoopBodyOwner::loop_body) .accept(node); if let Some(Some(body)) = loop_body { - if name_ref.syntax().range().is_subrange(&body.syntax().range()) { + if name_ref + .syntax() + .range() + .is_subrange(&body.syntax().range()) + { return true; } } @@ -368,9 +375,9 @@ fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec, pub params: Vec, - pub doc: Option + pub doc: Option, } impl FnDescriptor { @@ -57,7 +52,9 @@ impl FnDescriptor { }; if let Some((comment_range, docs)) = FnDescriptor::extract_doc_comments(node) { - let comment_range = comment_range.checked_sub(node.syntax().range().start()).unwrap(); + let comment_range = comment_range + .checked_sub(node.syntax().range().start()) + .unwrap(); let start = comment_range.start().to_usize(); let end = comment_range.end().to_usize(); @@ -94,7 +91,7 @@ impl FnDescriptor { ret_type, params, label: label.trim().to_owned(), - doc + doc, }) } @@ -105,10 +102,13 @@ impl FnDescriptor { let comment_text = node.doc_comment_text(); - let (begin, end) = node.doc_comments() + let (begin, end) = node + .doc_comments() .map(|comment| comment.syntax().range()) .map(|range| (range.start().to_usize(), range.end().to_usize())) - .fold((std::usize::MAX, std::usize::MIN), |acc, range| (min(acc.0, range.0), max(acc.1, range.1))); + .fold((std::usize::MAX, std::usize::MIN), |acc, range| { + (min(acc.0, range.0), max(acc.1, range.1)) + }); let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); @@ -134,4 +134,3 @@ impl FnDescriptor { res } } - diff --git a/crates/ra_analysis/src/descriptors/function/scope.rs b/crates/ra_analysis/src/descriptors/function/scope.rs index d9929414c..62b46ffba 100644 --- a/crates/ra_analysis/src/descriptors/function/scope.rs +++ b/crates/ra_analysis/src/descriptors/function/scope.rs @@ -51,9 +51,7 @@ impl FnScopes { &self.get(scope).entries } pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator + 'a 
{ - generate(self.scope_for(node), move |&scope| { - self.get(scope).parent - }) + generate(self.scope_for(node), move |&scope| self.get(scope).parent) } fn root_scope(&mut self) -> ScopeId { let res = ScopeId(self.scopes.len() as u32); @@ -273,13 +271,12 @@ pub fn resolve_local_name<'a>( #[cfg(test)] mod tests { + use ra_editor::find_node_at_offset; use ra_syntax::File; use test_utils::extract_offset; - use ra_editor::{find_node_at_offset}; use super::*; - fn do_check(code: &str, expected: &[&str]) { let (off, code) = extract_offset(code); let code = { diff --git a/crates/ra_analysis/src/descriptors/mod.rs b/crates/ra_analysis/src/descriptors/mod.rs index e27f8314a..c28764336 100644 --- a/crates/ra_analysis/src/descriptors/mod.rs +++ b/crates/ra_analysis/src/descriptors/mod.rs @@ -1,24 +1,22 @@ -pub(crate) mod module; pub(crate) mod function; +pub(crate) mod module; use std::sync::Arc; use ra_syntax::{ - SmolStr, ast::{self, AstNode, FnDefNode}, - TextRange + SmolStr, TextRange, }; use crate::{ - FileId, Cancelable, db::SyntaxDatabase, - descriptors::module::{ModuleTree, ModuleId, ModuleScope}, - descriptors::function::{FnId, FnScopes, resolve_local_name}, + descriptors::function::{resolve_local_name, FnId, FnScopes}, + descriptors::module::{ModuleId, ModuleScope, ModuleTree}, input::SourceRootId, - syntax_ptr::{SyntaxPtrDatabase, LocalSyntaxPtr}, + syntax_ptr::{LocalSyntaxPtr, SyntaxPtrDatabase}, + Cancelable, FileId, }; - salsa::query_group! { pub(crate) trait DescriptorDatabase: SyntaxDatabase + SyntaxPtrDatabase { fn module_tree(source_root_id: SourceRootId) -> Cancelable> { @@ -49,23 +47,20 @@ salsa::query_group! { #[derive(Debug)] pub struct ReferenceDescriptor { pub range: TextRange, - pub name: String + pub name: String, } #[derive(Debug)] pub struct DeclarationDescriptor<'a> { pat: ast::BindPat<'a>, - pub range: TextRange + pub range: TextRange, } impl<'a> DeclarationDescriptor<'a> { pub fn new(pat: ast::BindPat) -> DeclarationDescriptor { let range = pat.syntax().range(); - DeclarationDescriptor { - pat, - range - } + DeclarationDescriptor { pat, range } } pub fn find_all_refs(&self) -> Vec { @@ -73,22 +68,22 @@ impl<'a> DeclarationDescriptor<'a> { let fn_def = match self.pat.syntax().ancestors().find_map(ast::FnDef::cast) { Some(def) => def, - None => return Default::default() + None => return Default::default(), }; let fn_scopes = FnScopes::new(fn_def); - let refs : Vec<_> = fn_def.syntax().descendants() + let refs: Vec<_> = fn_def + .syntax() + .descendants() .filter_map(ast::NameRef::cast) - .filter(|name_ref| { - match resolve_local_name(*name_ref, &fn_scopes) { - None => false, - Some(entry) => entry.ptr() == name_ptr, - } + .filter(|name_ref| match resolve_local_name(*name_ref, &fn_scopes) { + None => false, + Some(entry) => entry.ptr() == name_ptr, }) .map(|name_ref| ReferenceDescriptor { name: name_ref.syntax().text().to_string(), - range : name_ref.syntax().range(), + range: name_ref.syntax().range(), }) .collect(); diff --git a/crates/ra_analysis/src/descriptors/module/imp.rs b/crates/ra_analysis/src/descriptors/module/imp.rs index dae3a356d..1c102f4e5 100644 --- a/crates/ra_analysis/src/descriptors/module/imp.rs +++ b/crates/ra_analysis/src/descriptors/module/imp.rs @@ -1,24 +1,25 @@ use std::sync::Arc; -use relative_path::RelativePathBuf; -use rustc_hash::{FxHashMap, FxHashSet}; use ra_syntax::{ - SmolStr, ast::{self, NameOwner}, + SmolStr, }; +use relative_path::RelativePathBuf; +use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ - FileId, Cancelable, 
FileResolverImp, db, - input::{SourceRoot, SourceRootId}, + db, descriptors::DescriptorDatabase, + input::{SourceRoot, SourceRootId}, + Cancelable, FileId, FileResolverImp, }; -use super::{ - ModuleData, ModuleTree, ModuleId, LinkId, LinkData, Problem, ModuleScope -}; - +use super::{LinkData, LinkId, ModuleData, ModuleId, ModuleScope, ModuleTree, Problem}; -pub(crate) fn submodules(db: &impl DescriptorDatabase, file_id: FileId) -> Cancelable>> { +pub(crate) fn submodules( + db: &impl DescriptorDatabase, + file_id: FileId, +) -> Cancelable>> { db::check_canceled(db)?; let file = db.file_syntax(file_id); let root = file.ast(); @@ -57,13 +58,11 @@ pub(crate) fn module_tree( Ok(Arc::new(res)) } - #[derive(Clone, Hash, PartialEq, Eq, Debug)] pub struct Submodule { pub name: SmolStr, } - fn create_module_tree<'a>( db: &impl DescriptorDatabase, source_root: SourceRootId, @@ -82,7 +81,15 @@ fn create_module_tree<'a>( continue; // TODO: use explicit crate_roots here } assert!(!roots.contains_key(&file_id)); - let module_id = build_subtree(db, &source_root, &mut tree, &mut visited, &mut roots, None, file_id)?; + let module_id = build_subtree( + db, + &source_root, + &mut tree, + &mut visited, + &mut roots, + None, + file_id, + )?; roots.insert(file_id, module_id); } Ok(tree) diff --git a/crates/ra_analysis/src/descriptors/module/mod.rs b/crates/ra_analysis/src/descriptors/module/mod.rs index 667553f74..302e3e81c 100644 --- a/crates/ra_analysis/src/descriptors/module/mod.rs +++ b/crates/ra_analysis/src/descriptors/module/mod.rs @@ -1,8 +1,11 @@ pub(super) mod imp; pub(crate) mod scope; +use ra_syntax::{ + ast::{self, AstNode, NameOwner}, + SmolStr, SyntaxNode, +}; use relative_path::RelativePathBuf; -use ra_syntax::{ast::{self, NameOwner, AstNode}, SmolStr, SyntaxNode}; use crate::FileId; @@ -16,9 +19,11 @@ pub(crate) struct ModuleTree { impl ModuleTree { pub(crate) fn modules_for_file(&self, file_id: FileId) -> Vec { - self.mods.iter() + self.mods + .iter() .enumerate() - .filter(|(_idx, it)| it.file_id == file_id).map(|(idx, _)| ModuleId(idx as u32)) + .filter(|(_idx, it)| it.file_id == file_id) + .map(|(idx, _)| ModuleId(idx as u32)) .collect() } @@ -50,7 +55,7 @@ impl ModuleId { } pub(crate) fn parent_link(self, tree: &ModuleTree) -> Option { tree.module(self).parent - } + } pub(crate) fn parent(self, tree: &ModuleTree) -> Option { let link = self.parent_link(tree)?; Some(tree.link(link).owner) @@ -69,18 +74,15 @@ impl ModuleId { curr } pub(crate) fn child(self, tree: &ModuleTree, name: &str) -> Option { - let link = tree.module(self) + let link = tree + .module(self) .children .iter() .map(|&it| tree.link(it)) .find(|it| it.name == name)?; Some(*link.points_to.first()?) 
} - pub(crate) fn problems( - self, - tree: &ModuleTree, - root: ast::Root, - ) -> Vec<(SyntaxNode, Problem)> { + pub(crate) fn problems(self, tree: &ModuleTree, root: ast::Root) -> Vec<(SyntaxNode, Problem)> { tree.module(self) .children .iter() @@ -98,11 +100,7 @@ impl LinkId { pub(crate) fn owner(self, tree: &ModuleTree) -> ModuleId { tree.link(self).owner } - pub(crate) fn bind_source<'a>( - self, - tree: &ModuleTree, - root: ast::Root<'a>, - ) -> ast::Module<'a> { + pub(crate) fn bind_source<'a>(self, tree: &ModuleTree, root: ast::Root<'a>) -> ast::Module<'a> { imp::modules(root) .find(|(name, _)| name == &tree.link(self).name) .unwrap() @@ -125,7 +123,6 @@ struct LinkData { problem: Option, } - impl ModuleTree { fn module(&self, id: ModuleId) -> &ModuleData { &self.mods[id.0 as usize] @@ -152,4 +149,3 @@ impl ModuleTree { id } } - diff --git a/crates/ra_analysis/src/descriptors/module/scope.rs b/crates/ra_analysis/src/descriptors/module/scope.rs index 846b8b44f..681e272c2 100644 --- a/crates/ra_analysis/src/descriptors/module/scope.rs +++ b/crates/ra_analysis/src/descriptors/module/scope.rs @@ -1,9 +1,8 @@ //! Backend for module-level scope resolution & completion - use ra_syntax::{ ast::{self, ModuleItemOwner}, - File, AstNode, SmolStr, + AstNode, File, SmolStr, }; use crate::syntax_ptr::LocalSyntaxPtr; @@ -103,7 +102,7 @@ fn collect_imports(tree: ast::UseTree, acc: &mut Vec) { #[cfg(test)] mod tests { use super::*; - use ra_syntax::{File}; + use ra_syntax::File; fn do_check(code: &str, expected: &[&str]) { let file = File::parse(&code); diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs index 1eb8cb912..44077b507 100644 --- a/crates/ra_analysis/src/imp.rs +++ b/crates/ra_analysis/src/imp.rs @@ -13,24 +13,21 @@ use ra_syntax::{ use rayon::prelude::*; use relative_path::RelativePath; use rustc_hash::FxHashSet; -use salsa::{ParallelDatabase, Database}; +use salsa::{Database, ParallelDatabase}; use crate::{ - AnalysisChange, - db::{ - self, SyntaxDatabase, FileSyntaxQuery, - }, - input::{SourceRootId, FilesDatabase, SourceRoot, WORKSPACE}, + completion::{resolve_based_completion, scope_completion, CompletionItem}, + db::{self, FileSyntaxQuery, SyntaxDatabase}, descriptors::{ - DescriptorDatabase, DeclarationDescriptor, - module::{ModuleTree, Problem}, function::{FnDescriptor, FnId}, + module::{ModuleTree, Problem}, + DeclarationDescriptor, DescriptorDatabase, }, - completion::{scope_completion, resolve_based_completion, CompletionItem}, + input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE}, symbol_index::SymbolIndex, syntax_ptr::SyntaxPtrDatabase, - CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, Position, - Query, SourceChange, SourceFileEdit, Cancelable, + AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver, + FileSystemEdit, Position, Query, SourceChange, SourceFileEdit, }; #[derive(Clone, Debug)] @@ -94,7 +91,6 @@ pub(crate) struct AnalysisHostImpl { db: db::RootDatabase, } - impl AnalysisHostImpl { pub fn new() -> AnalysisHostImpl { let db = db::RootDatabase::default(); @@ -108,7 +104,7 @@ impl AnalysisHostImpl { } pub fn analysis(&self) -> AnalysisImpl { AnalysisImpl { - db: self.db.fork() // freeze revision here + db: self.db.fork(), // freeze revision here } } pub fn apply_change(&mut self, change: AnalysisChange) { @@ -120,7 +116,8 @@ impl AnalysisHostImpl { .set(file_id, Arc::new(text)) } if !(change.files_added.is_empty() && change.files_removed.is_empty()) { - let file_resolver = 
change.file_resolver + let file_resolver = change + .file_resolver .expect("change resolver when changing set of files"); let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE)); for (file_id, text) in change.files_added { @@ -174,7 +171,8 @@ impl AnalysisHostImpl { .set((), Arc::new(libraries)); } if let Some(crate_graph) = change.crate_graph { - self.db.query(crate::input::CrateGraphQuery) + self.db + .query(crate::input::CrateGraphQuery) .set((), Arc::new(crate_graph)) } } @@ -194,18 +192,22 @@ impl AnalysisImpl { } pub fn world_symbols(&self, query: Query) -> Cancelable> { let buf: Vec> = if query.libs { - self.db.libraries().iter() + self.db + .libraries() + .iter() .map(|&lib_id| self.db.library_symbols(lib_id)) .collect() } else { let files = &self.db.source_root(WORKSPACE).files; let db = self.db.clone(); - files.par_iter() + files + .par_iter() .map_with(db, |db, &file_id| db.file_symbols(file_id)) .filter_map(|it| it.ok()) .collect() }; - self.db.query(FileSyntaxQuery) + self.db + .query(FileSyntaxQuery) .sweep(salsa::SweepStrategy::default().discard_values()); Ok(query.search(&buf)) } @@ -216,7 +218,8 @@ impl AnalysisImpl { pub fn parent_module(&self, file_id: FileId) -> Cancelable> { let module_tree = self.module_tree(file_id)?; - let res = module_tree.modules_for_file(file_id) + let res = module_tree + .modules_for_file(file_id) .into_iter() .filter_map(|module_id| { let link = module_id.parent_link(&module_tree)?; @@ -237,7 +240,8 @@ impl AnalysisImpl { pub fn crate_for(&self, file_id: FileId) -> Cancelable> { let module_tree = self.module_tree(file_id)?; let crate_graph = self.db.crate_graph(); - let res = module_tree.modules_for_file(file_id) + let res = module_tree + .modules_for_file(file_id) .into_iter() .map(|it| it.root(&module_tree)) .map(|it| it.file_id(&module_tree)) @@ -249,7 +253,11 @@ impl AnalysisImpl { pub fn crate_root(&self, crate_id: CrateId) -> FileId { self.db.crate_graph().crate_roots[&crate_id] } - pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Cancelable>> { + pub fn completions( + &self, + file_id: FileId, + offset: TextUnit, + ) -> Cancelable>> { let mut res = Vec::new(); let mut has_completions = false; if let Some(scope_based) = scope_completion(&self.db, file_id, offset) { @@ -260,11 +268,7 @@ impl AnalysisImpl { res.extend(scope_based); has_completions = true; } - let res = if has_completions { - Some(res) - } else { - None - }; + let res = if has_completions { Some(res) } else { None }; Ok(res) } pub fn approximately_resolve_symbol( @@ -326,12 +330,11 @@ impl AnalysisImpl { let syntax = file.syntax(); // Find the binding associated with the offset - let maybe_binding = find_node_at_offset::(syntax, offset) - .or_else(|| { - let name_ref = find_node_at_offset::(syntax, offset)?; - let resolved = resolve_local_name(&self.db, file_id, name_ref)?; - find_node_at_offset::(syntax, resolved.1.end()) - }); + let maybe_binding = find_node_at_offset::(syntax, offset).or_else(|| { + let name_ref = find_node_at_offset::(syntax, offset)?; + let resolved = resolve_local_name(&self.db, file_id, name_ref)?; + find_node_at_offset::(syntax, resolved.1.end()) + }); let binding = match maybe_binding { None => return Vec::new(), @@ -341,8 +344,11 @@ impl AnalysisImpl { let decl = DeclarationDescriptor::new(binding); let mut ret = vec![(file_id, decl.range)]; - ret.extend(decl.find_all_refs().into_iter() - .map(|ref_desc| (file_id, ref_desc.range ))); + ret.extend( + decl.find_all_refs() + .into_iter() + .map(|ref_desc| (file_id, 
ref_desc.range)), + ); ret } @@ -526,7 +532,8 @@ impl AnalysisImpl { Some(id) => id, None => return Vec::new(), }; - module_id.child(module_tree, name.as_str()) + module_id + .child(module_tree, name.as_str()) .map(|it| it.file_id(module_tree)) .into_iter() .collect() diff --git a/crates/ra_analysis/src/input.rs b/crates/ra_analysis/src/input.rs index b89b45133..ba8a17fd5 100644 --- a/crates/ra_analysis/src/input.rs +++ b/crates/ra_analysis/src/input.rs @@ -1,12 +1,9 @@ -use std::{ - sync::Arc, - fmt, -}; +use std::{fmt, sync::Arc}; -use salsa; -use rustc_hash::FxHashSet; use relative_path::RelativePath; use rustc_hash::FxHashMap; +use rustc_hash::FxHashSet; +use salsa; use crate::{symbol_index::SymbolIndex, FileResolverImp}; diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs index e75411ec9..32aa7a1fd 100644 --- a/crates/ra_analysis/src/lib.rs +++ b/crates/ra_analysis/src/lib.rs @@ -6,23 +6,20 @@ extern crate relative_path; extern crate rustc_hash; extern crate salsa; -mod input; +mod completion; mod db; mod descriptors; mod imp; +mod input; +pub mod mock_analysis; mod symbol_index; -mod completion; mod syntax_ptr; -pub mod mock_analysis; -use std::{ - fmt, - sync::Arc, -}; +use std::{fmt, sync::Arc}; use ra_syntax::{AtomEdit, File, TextRange, TextUnit}; -use relative_path::RelativePathBuf; use rayon::prelude::*; +use relative_path::RelativePathBuf; use crate::{ imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp}, @@ -30,13 +27,12 @@ use crate::{ }; pub use crate::{ - descriptors::function::FnDescriptor, completion::CompletionItem, - input::{FileId, FileResolver, CrateGraph, CrateId}, + descriptors::function::FnDescriptor, + input::{CrateGraph, CrateId, FileId, FileResolver}, }; pub use ra_editor::{ - FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, - RunnableKind, StructureNode, + FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, RunnableKind, StructureNode, }; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -50,8 +46,7 @@ impl std::fmt::Display for Canceled { } } -impl std::error::Error for Canceled { -} +impl std::error::Error for Canceled {} #[derive(Default)] pub struct AnalysisChange { @@ -76,7 +71,6 @@ impl fmt::Debug for AnalysisChange { } } - impl AnalysisChange { pub fn new() -> AnalysisChange { AnalysisChange::default() @@ -251,12 +245,15 @@ impl Analysis { pub fn approximately_resolve_symbol( &self, file_id: FileId, - offset: TextUnit + offset: TextUnit, ) -> Cancelable> { - self.imp - .approximately_resolve_symbol(file_id, offset) + self.imp.approximately_resolve_symbol(file_id, offset) } - pub fn find_all_refs(&self, file_id: FileId, offset: TextUnit, ) -> Cancelable> { + pub fn find_all_refs( + &self, + file_id: FileId, + offset: TextUnit, + ) -> Cancelable> { Ok(self.imp.find_all_refs(file_id, offset)) } pub fn parent_module(&self, file_id: FileId) -> Cancelable> { @@ -276,7 +273,11 @@ impl Analysis { let file = self.imp.file_syntax(file_id); Ok(ra_editor::highlight(&file)) } - pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Cancelable>> { + pub fn completions( + &self, + file_id: FileId, + offset: TextUnit, + ) -> Cancelable>> { self.imp.completions(file_id, offset) } pub fn assists(&self, file_id: FileId, range: TextRange) -> Cancelable> { @@ -307,7 +308,11 @@ impl LibraryData { let file = File::parse(text); (*file_id, file) })); - LibraryData { files, file_resolver: FileResolverImp::new(file_resolver), symbol_index } + LibraryData { + files, + file_resolver: 
FileResolverImp::new(file_resolver), + symbol_index, + } } } diff --git a/crates/ra_analysis/src/mock_analysis.rs b/crates/ra_analysis/src/mock_analysis.rs index f72911192..76100f548 100644 --- a/crates/ra_analysis/src/mock_analysis.rs +++ b/crates/ra_analysis/src/mock_analysis.rs @@ -1,13 +1,10 @@ - use std::sync::Arc; -use relative_path::{RelativePath, RelativePathBuf}; use ra_syntax::TextUnit; +use relative_path::{RelativePath, RelativePathBuf}; use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; -use crate::{ - AnalysisChange, Analysis, AnalysisHost, FileId, FileResolver, -}; +use crate::{Analysis, AnalysisChange, AnalysisHost, FileId, FileResolver}; #[derive(Debug)] pub struct FilePosition { @@ -51,7 +48,10 @@ impl MockAnalysis { let mut res = MockAnalysis::new(); for entry in parse_fixture(fixture) { if entry.text.contains(CURSOR_MARKER) { - assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); + assert!( + position.is_none(), + "only one marker (<|>) per fixture is allowed" + ); position = Some(res.add_file_with_position(&entry.meta, &entry.text)); } else { res.add_file(&entry.meta, &entry.text); @@ -73,7 +73,10 @@ impl MockAnalysis { FilePosition { file_id, offset } } pub fn id_of(&self, path: &str) -> FileId { - let (idx, _) = self.files.iter().enumerate() + let (idx, _) = self + .files + .iter() + .enumerate() .find(|(_, (p, _text))| path == p) .expect("no file in this mock"); FileId(idx as u32 + 1) diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs index 5f302cbda..b57ad5d33 100644 --- a/crates/ra_analysis/src/symbol_index.rs +++ b/crates/ra_analysis/src/symbol_index.rs @@ -57,10 +57,7 @@ impl SymbolIndex { } impl Query { - pub(crate) fn search( - self, - indices: &[Arc], - ) -> Vec<(FileId, FileSymbol)> { + pub(crate) fn search(self, indices: &[Arc]) -> Vec<(FileId, FileSymbol)> { let mut op = fst::map::OpBuilder::new(); for file_symbols in indices.iter() { let automaton = fst::automaton::Subsequence::new(&self.lowercased); diff --git a/crates/ra_analysis/src/syntax_ptr.rs b/crates/ra_analysis/src/syntax_ptr.rs index c3c904633..612f75cd9 100644 --- a/crates/ra_analysis/src/syntax_ptr.rs +++ b/crates/ra_analysis/src/syntax_ptr.rs @@ -1,12 +1,12 @@ use std::marker::PhantomData; use ra_syntax::{ - File, TextRange, SyntaxKind, SyntaxNode, SyntaxNodeRef, ast::{self, AstNode}, + File, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange, }; -use crate::FileId; use crate::db::SyntaxDatabase; +use crate::FileId; salsa::query_group! { pub(crate) trait SyntaxPtrDatabase: SyntaxDatabase { @@ -52,12 +52,10 @@ trait ToAst { impl<'a> ToAst for &'a OwnedAst> { type Ast = ast::FnDef<'a>; fn to_ast(self) -> ast::FnDef<'a> { - ast::FnDef::cast(self.syntax.borrowed()) - .unwrap() + ast::FnDef::cast(self.syntax.borrowed()).unwrap() } } - /// A pionter to a syntax node inside a file. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub(crate) struct LocalSyntaxPtr { @@ -79,22 +77,29 @@ impl LocalSyntaxPtr { if curr.range() == self.range && curr.kind() == self.kind { return curr.owned(); } - curr = curr.children() + curr = curr + .children() .find(|it| self.range.is_subrange(&it.range())) .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self)) } } pub(crate) fn into_global(self, file_id: FileId) -> SyntaxPtr { - SyntaxPtr { file_id, local: self} + SyntaxPtr { + file_id, + local: self, + } } } - #[test] fn test_local_syntax_ptr() { let file = File::parse("struct Foo { f: u32, }"); - let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap(); + let field = file + .syntax() + .descendants() + .find_map(ast::NamedFieldDef::cast) + .unwrap(); let ptr = LocalSyntaxPtr::new(field.syntax()); let field_syntax = ptr.resolve(&file); assert_eq!(field.syntax(), field_syntax); diff --git a/crates/ra_analysis/tests/tests.rs b/crates/ra_analysis/tests/tests.rs index 22d27cdbe..c2754c8e4 100644 --- a/crates/ra_analysis/tests/tests.rs +++ b/crates/ra_analysis/tests/tests.rs @@ -5,42 +5,53 @@ extern crate relative_path; extern crate rustc_hash; extern crate test_utils; -use ra_syntax::{TextRange}; -use test_utils::{assert_eq_dbg}; +use ra_syntax::TextRange; +use test_utils::assert_eq_dbg; use ra_analysis::{ + mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis}, AnalysisChange, CrateGraph, FileId, FnDescriptor, - mock_analysis::{MockAnalysis, single_file, single_file_with_position, analysis_and_position}, }; fn get_signature(text: &str) -> (FnDescriptor, Option) { let (analysis, position) = single_file_with_position(text); - analysis.resolve_callable(position.file_id, position.offset).unwrap().unwrap() + analysis + .resolve_callable(position.file_id, position.offset) + .unwrap() + .unwrap() } #[test] fn test_resolve_module() { - let (analysis, pos) = analysis_and_position(" + let (analysis, pos) = analysis_and_position( + " //- /lib.rs mod <|>foo; //- /foo.rs // empty - "); + ", + ); - let symbols = analysis.approximately_resolve_symbol(pos.file_id, pos.offset).unwrap(); + let symbols = analysis + .approximately_resolve_symbol(pos.file_id, pos.offset) + .unwrap(); assert_eq_dbg( r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#, &symbols, ); - let (analysis, pos) = analysis_and_position(" + let (analysis, pos) = analysis_and_position( + " //- /lib.rs mod <|>foo; //- /foo/mod.rs // empty - "); + ", + ); - let symbols = analysis.approximately_resolve_symbol(pos.file_id, pos.offset).unwrap(); + let symbols = analysis + .approximately_resolve_symbol(pos.file_id, pos.offset) + .unwrap(); assert_eq_dbg( r#"[(FileId(2), FileSymbol { name: "foo", node_range: [0; 0), kind: MODULE })]"#, &symbols, @@ -73,12 +84,14 @@ fn test_unresolved_module_diagnostic_no_diag_for_inline_mode() { #[test] fn test_resolve_parent_module() { - let (analysis, pos) = analysis_and_position(" + let (analysis, pos) = analysis_and_position( + " //- /lib.rs mod foo; //- /foo.rs <|>// empty - "); + ", + ); let symbols = analysis.parent_module(pos.file_id).unwrap(); assert_eq_dbg( r#"[(FileId(1), FileSymbol { name: "foo", node_range: [0; 8), kind: MODULE })]"#, @@ -88,12 +101,14 @@ fn test_resolve_parent_module() { #[test] fn test_resolve_crate_root() { - let mock = MockAnalysis::with_files(" + let mock = MockAnalysis::with_files( + " //- /lib.rs mod foo; //- /foo.rs // emtpy <|> - "); + ", + ); let 
root_file = mock.id_of("/lib.rs"); let mod_file = mock.id_of("/foo.rs"); let mut host = mock.analysis_host(); @@ -245,8 +260,10 @@ pub fn do() { assert_eq!(desc.ret_type, Some("-> i32".to_string())); assert_eq!(param, Some(0)); assert_eq!(desc.label, "pub fn add_one(x: i32) -> i32".to_string()); - assert_eq!(desc.doc, Some( -r#"Adds one to the number given. + assert_eq!( + desc.doc, + Some( + r#"Adds one to the number given. # Examples @@ -254,7 +271,10 @@ r#"Adds one to the number given. let five = 5; assert_eq!(6, my_crate::add_one(5)); -```"#.into())); +```"# + .into() + ) + ); } #[test] @@ -280,15 +300,18 @@ impl addr { pub fn do_it() { addr {}; addr::add_one(<|>); -}"#); +}"#, + ); assert_eq!(desc.name, "add_one".to_string()); assert_eq!(desc.params, vec!["x".to_string()]); assert_eq!(desc.ret_type, Some("-> i32".to_string())); assert_eq!(param, Some(0)); assert_eq!(desc.label, "pub fn add_one(x: i32) -> i32".to_string()); - assert_eq!(desc.doc, Some( -r#"Adds one to the number given. + assert_eq!( + desc.doc, + Some( + r#"Adds one to the number given. # Examples @@ -296,7 +319,10 @@ r#"Adds one to the number given. let five = 5; assert_eq!(6, my_crate::add_one(5)); -```"#.into())); +```"# + .into() + ) + ); } #[test] @@ -329,22 +355,32 @@ pub fn foo() { r.finished(<|>); } -"#); +"#, + ); assert_eq!(desc.name, "finished".to_string()); - assert_eq!(desc.params, vec!["&mut self".to_string(), "ctx".to_string()]); + assert_eq!( + desc.params, + vec!["&mut self".to_string(), "ctx".to_string()] + ); assert_eq!(desc.ret_type, None); assert_eq!(param, Some(1)); - assert_eq!(desc.doc, Some( -r#"Method is called when writer finishes. - -By default this method stops actor's `Context`."#.into())); + assert_eq!( + desc.doc, + Some( + r#"Method is called when writer finishes. 
+ +By default this method stops actor's `Context`."# + .into() + ) + ); } - fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> { let (analysis, position) = single_file_with_position(text); - analysis.find_all_refs(position.file_id, position.offset).unwrap() + analysis + .find_all_refs(position.file_id, position.offset) + .unwrap() } #[test] @@ -390,14 +426,19 @@ fn test_find_all_refs_for_fn_param() { #[test] fn test_complete_crate_path() { - let (analysis, position) = analysis_and_position(" + let (analysis, position) = analysis_and_position( + " //- /lib.rs mod foo; struct Spam; //- /foo.rs use crate::Sp<|> - "); - let completions = analysis.completions(position.file_id, position.offset).unwrap().unwrap(); + ", + ); + let completions = analysis + .completions(position.file_id, position.offset) + .unwrap() + .unwrap(); assert_eq_dbg( r#"[CompletionItem { label: "foo", lookup: None, snippet: None }, CompletionItem { label: "Spam", lookup: None, snippet: None }]"#, diff --git a/crates/ra_editor/src/folding_ranges.rs b/crates/ra_editor/src/folding_ranges.rs index 8b79ea874..0803c8891 100644 --- a/crates/ra_editor/src/folding_ranges.rs +++ b/crates/ra_editor/src/folding_ranges.rs @@ -174,8 +174,16 @@ mod tests { let file = File::parse(&text); let folds = folding_ranges(&file); - assert_eq!(folds.len(), ranges.len(), "The amount of folds is different than the expected amount"); - assert_eq!(folds.len(), fold_kinds.len(), "The amount of fold kinds is different than the expected amount"); + assert_eq!( + folds.len(), + ranges.len(), + "The amount of folds is different than the expected amount" + ); + assert_eq!( + folds.len(), + fold_kinds.len(), + "The amount of fold kinds is different than the expected amount" + ); for ((fold, range), fold_kind) in folds .into_iter() .zip(ranges.into_iter()) diff --git a/crates/ra_editor/src/lib.rs b/crates/ra_editor/src/lib.rs index 02a1b2d45..481f4c9fc 100644 --- a/crates/ra_editor/src/lib.rs +++ b/crates/ra_editor/src/lib.rs @@ -148,8 +148,6 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>( leaf.ancestors().filter_map(N::cast).next() } - - #[cfg(test)] mod tests { use super::*; diff --git a/crates/ra_editor/src/symbols.rs b/crates/ra_editor/src/symbols.rs index 0bab9dd67..4e602d0e3 100644 --- a/crates/ra_editor/src/symbols.rs +++ b/crates/ra_editor/src/symbols.rs @@ -1,12 +1,9 @@ use crate::TextRange; use ra_syntax::{ - algo::{ - visit::{visitor, Visitor}, - }, + algo::visit::{visitor, Visitor}, ast::{self, NameOwner}, - AstNode, File, SmolStr, SyntaxKind, SyntaxNodeRef, - WalkEvent, + AstNode, File, SmolStr, SyntaxKind, SyntaxNodeRef, WalkEvent, }; #[derive(Debug, Clone)] @@ -54,7 +51,6 @@ pub fn file_structure(file: &File) -> Vec { let mut res = Vec::new(); let mut stack = Vec::new(); - for event in file.syntax().preorder() { match event { WalkEvent::Enter(node) => { @@ -63,7 +59,7 @@ pub fn file_structure(file: &File) -> Vec { stack.push(res.len()); res.push(symbol); } - }, + } WalkEvent::Leave(node) => { if structure_node(node).is_some() { stack.pop().unwrap(); diff --git a/crates/ra_lsp_server/src/caps.rs b/crates/ra_lsp_server/src/caps.rs index b6436b646..ac6aacfd3 100644 --- a/crates/ra_lsp_server/src/caps.rs +++ b/crates/ra_lsp_server/src/caps.rs @@ -1,8 +1,8 @@ use languageserver_types::{ CodeActionProviderCapability, CompletionOptions, DocumentOnTypeFormattingOptions, - ExecuteCommandOptions, FoldingRangeProviderCapability, ServerCapabilities, - SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind, - TextDocumentSyncOptions, 
RenameProviderCapability, RenameOptions + ExecuteCommandOptions, FoldingRangeProviderCapability, RenameOptions, RenameProviderCapability, + ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind, + TextDocumentSyncOptions, }; pub fn server_capabilities() -> ServerCapabilities { @@ -40,8 +40,8 @@ pub fn server_capabilities() -> ServerCapabilities { more_trigger_character: None, }), folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), - rename_provider: Some(RenameProviderCapability::Options(RenameOptions{ - prepare_provider: Some(true) + rename_provider: Some(RenameProviderCapability::Options(RenameOptions { + prepare_provider: Some(true), })), color_provider: None, execute_command_provider: Some(ExecuteCommandOptions { diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs index bc0cf7c68..84ffac2da 100644 --- a/crates/ra_lsp_server/src/conv.rs +++ b/crates/ra_lsp_server/src/conv.rs @@ -192,7 +192,8 @@ impl TryConvWith for SourceChange { .map(|it| it.edits.as_slice()) .unwrap_or(&[]); let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits); - let position = Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col))); + let position = + Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col))); Some(TextDocumentPositionParams { text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?), position, diff --git a/crates/ra_lsp_server/src/lib.rs b/crates/ra_lsp_server/src/lib.rs index ce77b2a33..f9481e04d 100644 --- a/crates/ra_lsp_server/src/lib.rs +++ b/crates/ra_lsp_server/src/lib.rs @@ -36,4 +36,4 @@ pub mod thread_watcher; mod vfs; pub type Result = ::std::result::Result; -pub use crate::{caps::server_capabilities, main_loop::main_loop, main_loop::LspError}; \ No newline at end of file +pub use crate::{caps::server_capabilities, main_loop::main_loop, main_loop::LspError}; diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index 20cb5f772..c853ff653 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs @@ -1,15 +1,16 @@ use std::collections::HashMap; -use rustc_hash::FxHashMap; +use gen_lsp_server::ErrorCode; use languageserver_types::{ CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic, - DiagnosticSeverity, DocumentSymbol, FoldingRange, FoldingRangeKind, FoldingRangeParams, - InsertTextFormat, Location, Position, SymbolInformation, TextDocumentIdentifier, TextEdit, - RenameParams, WorkspaceEdit, PrepareRenameResponse, Documentation, MarkupContent, MarkupKind + DiagnosticSeverity, DocumentSymbol, Documentation, FoldingRange, FoldingRangeKind, + FoldingRangeParams, InsertTextFormat, Location, MarkupContent, MarkupKind, Position, + PrepareRenameResponse, RenameParams, SymbolInformation, TextDocumentIdentifier, TextEdit, + WorkspaceEdit, }; -use gen_lsp_server::ErrorCode; use ra_analysis::{FileId, FoldKind, Query, RunnableKind}; use ra_syntax::text_utils::contains_offset_nonstrict; +use rustc_hash::FxHashMap; use serde_json::to_value; use crate::{ @@ -17,13 +18,10 @@ use crate::{ project_model::TargetKind, req::{self, Decoration}, server_world::ServerWorld, - Result, LspError + LspError, Result, }; -pub fn handle_syntax_tree( - world: ServerWorld, - params: req::SyntaxTreeParams, -) -> Result { +pub fn handle_syntax_tree(world: ServerWorld, params: req::SyntaxTreeParams) -> Result { let id = 
params.text_document.try_conv_with(&world)?; let res = world.analysis().syntax_tree(id); Ok(res) @@ -182,10 +180,7 @@ pub fn handle_workspace_symbol( return Ok(Some(res)); - fn exec_query( - world: &ServerWorld, - query: Query, - ) -> Result> { + fn exec_query(world: &ServerWorld, query: Query) -> Result> { let mut res = Vec::new(); for (file_id, symbol) in world.analysis().symbol_search(query)? { let line_index = world.analysis().file_line_index(file_id); @@ -290,7 +285,11 @@ pub fn handle_runnables( }); return Ok(res); - fn runnable_args(world: &ServerWorld, file_id: FileId, kind: &RunnableKind) -> Result> { + fn runnable_args( + world: &ServerWorld, + file_id: FileId, + kind: &RunnableKind, + ) -> Result> { let spec = CargoTargetSpec::for_file(world, file_id)?; let mut res = Vec::new(); match kind { @@ -327,18 +326,15 @@ pub fn handle_runnables( }; let file_id = world.analysis().crate_root(crate_id)?; let path = world.path_map.get_path(file_id); - let res = world - .workspaces - .iter() - .find_map(|ws| { - let tgt = ws.target_by_root(path)?; - let res = CargoTargetSpec { - package: tgt.package(ws).name(ws).to_string(), - target: tgt.name(ws).to_string(), - target_kind: tgt.kind(ws), - }; - Some(res) - }); + let res = world.workspaces.iter().find_map(|ws| { + let tgt = ws.target_by_root(path)?; + let res = CargoTargetSpec { + package: tgt.package(ws).name(ws).to_string(), + target: tgt.name(ws).to_string(), + target_kind: tgt.kind(ws), + }; + Some(res) + }); Ok(res) } @@ -367,7 +363,6 @@ pub fn handle_runnables( } TargetKind::Other => (), } - } } } @@ -453,9 +448,7 @@ pub fn handle_signature_help( let line_index = world.analysis().file_line_index(file_id); let offset = params.position.conv_with(&line_index); - if let Some((descriptor, active_param)) = - world.analysis().resolve_callable(file_id, offset)? - { + if let Some((descriptor, active_param)) = world.analysis().resolve_callable(file_id, offset)? { let parameters: Vec = descriptor .params .iter() @@ -468,7 +461,7 @@ pub fn handle_signature_help( let documentation = if let Some(doc) = descriptor.doc { Some(Documentation::MarkupContent(MarkupContent { kind: MarkupKind::Markdown, - value: doc + value: doc, })) } else { None @@ -511,16 +504,17 @@ pub fn handle_prepare_rename( Ok(Some(PrepareRenameResponse::Range(loc.range))) } -pub fn handle_rename( - world: ServerWorld, - params: RenameParams, -) -> Result> { +pub fn handle_rename(world: ServerWorld, params: RenameParams) -> Result> { let file_id = params.text_document.try_conv_with(&world)?; let line_index = world.analysis().file_line_index(file_id); let offset = params.position.conv_with(&line_index); if params.new_name.is_empty() { - return Err(LspError::new(ErrorCode::InvalidParams as i32, "New Name cannot be empty".into()).into()); + return Err(LspError::new( + ErrorCode::InvalidParams as i32, + "New Name cannot be empty".into(), + ) + .into()); } let refs = world.analysis().find_all_refs(file_id, offset)?; @@ -531,11 +525,10 @@ pub fn handle_rename( let mut changes = HashMap::new(); for r in refs { if let Ok(loc) = to_location(r.0, r.1, &world, &line_index) { - changes.entry(loc.uri).or_insert(Vec::new()).push( - TextEdit { - range: loc.range, - new_text: params.new_name.clone() - }); + changes.entry(loc.uri).or_insert(Vec::new()).push(TextEdit { + range: loc.range, + new_text: params.new_name.clone(), + }); } } @@ -543,7 +536,7 @@ pub fn handle_rename( changes: Some(changes), // TODO: return this instead if client/server support it. 
See #144 - document_changes : None, + document_changes: None, })) } @@ -557,9 +550,11 @@ pub fn handle_references( let refs = world.analysis().find_all_refs(file_id, offset)?; - Ok(Some(refs.into_iter() - .filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok()) - .collect())) + Ok(Some( + refs.into_iter() + .filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok()) + .collect(), + )) } pub fn handle_code_action( diff --git a/crates/ra_lsp_server/src/main_loop/mod.rs b/crates/ra_lsp_server/src/main_loop/mod.rs index 9ddc3fd0b..c568706bd 100644 --- a/crates/ra_lsp_server/src/main_loop/mod.rs +++ b/crates/ra_lsp_server/src/main_loop/mod.rs @@ -24,7 +24,10 @@ use crate::{ }; #[derive(Debug, Fail)] -#[fail(display = "Language Server request failed with {}. ({})", code, message)] +#[fail( + display = "Language Server request failed with {}. ({})", + code, message +)] pub struct LspError { pub code: i32, pub message: String, @@ -32,7 +35,7 @@ pub struct LspError { impl LspError { pub fn new(code: i32, message: String) -> LspError { - LspError {code, message} + LspError { code, message } } } @@ -214,11 +217,7 @@ fn main_loop_inner( } } -fn on_task( - task: Task, - msg_sender: &Sender, - pending_requests: &mut FxHashSet, -) { +fn on_task(task: Task, msg_sender: &Sender, pending_requests: &mut FxHashSet) { match task { Task::Respond(response) => { if pending_requests.remove(&response.id) { @@ -373,12 +372,16 @@ impl<'a> PoolDispatcher<'a> { self.pool.spawn(move || { let resp = match f(world, params) { Ok(resp) => RawResponse::ok::(id, &resp), - Err(e) => { - match e.downcast::() { - Ok(lsp_error) => RawResponse::err(id, lsp_error.code, lsp_error.message), - Err(e) => RawResponse::err(id, ErrorCode::InternalError as i32, format!("{}\n{}", e, e.backtrace())) + Err(e) => match e.downcast::() { + Ok(lsp_error) => { + RawResponse::err(id, lsp_error.code, lsp_error.message) } - } + Err(e) => RawResponse::err( + id, + ErrorCode::InternalError as i32, + format!("{}\n{}", e, e.backtrace()), + ), + }, }; let task = Task::Respond(resp); sender.send(task); @@ -412,7 +415,7 @@ fn update_file_notifications_on_threadpool( if !is_canceled(&e) { error!("failed to compute diagnostics: {:?}", e); } - }, + } Ok(params) => { let not = RawNotification::new::(¶ms); sender.send(Task::Notify(not)); @@ -423,7 +426,7 @@ fn update_file_notifications_on_threadpool( if !is_canceled(&e) { error!("failed to compute decorations: {:?}", e); } - }, + } Ok(params) => { let not = RawNotification::new::(¶ms); sender.send(Task::Notify(not)) diff --git a/crates/ra_lsp_server/src/path_map.rs b/crates/ra_lsp_server/src/path_map.rs index d5957d673..dd09fa10a 100644 --- a/crates/ra_lsp_server/src/path_map.rs +++ b/crates/ra_lsp_server/src/path_map.rs @@ -33,7 +33,8 @@ impl PathMap { } pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> (bool, FileId) { let mut inserted = false; - let file_id = self.path2id + let file_id = self + .path2id .get(path.as_path()) .map(|&id| id) .unwrap_or_else(|| { diff --git a/crates/ra_lsp_server/src/req.rs b/crates/ra_lsp_server/src/req.rs index 6cd04d84c..9d911912d 100644 --- a/crates/ra_lsp_server/src/req.rs +++ b/crates/ra_lsp_server/src/req.rs @@ -6,8 +6,8 @@ pub use languageserver_types::{ notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CompletionParams, CompletionResponse, DocumentOnTypeFormattingParams, DocumentSymbolParams, DocumentSymbolResponse, ExecuteCommandParams, Hover, InitializeResult, - PublishDiagnosticsParams, SignatureHelp, TextDocumentEdit, 
TextDocumentPositionParams, - TextEdit, WorkspaceSymbolParams, ReferenceParams, + PublishDiagnosticsParams, ReferenceParams, SignatureHelp, TextDocumentEdit, + TextDocumentPositionParams, TextEdit, WorkspaceSymbolParams, }; pub enum SyntaxTree {} diff --git a/crates/ra_lsp_server/src/server_world.rs b/crates/ra_lsp_server/src/server_world.rs index 25986e230..3e76d2527 100644 --- a/crates/ra_lsp_server/src/server_world.rs +++ b/crates/ra_lsp_server/src/server_world.rs @@ -5,7 +5,9 @@ use std::{ }; use languageserver_types::Url; -use ra_analysis::{Analysis, AnalysisHost, AnalysisChange, CrateGraph, FileId, FileResolver, LibraryData}; +use ra_analysis::{ + Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FileResolver, LibraryData, +}; use rustc_hash::FxHashMap; use crate::{ @@ -65,9 +67,7 @@ impl ServerWorldState { Some((file_id, text)) } }) - .for_each(|(file_id, text)| { - change.add_file(file_id, text) - }); + .for_each(|(file_id, text)| change.add_file(file_id, text)); } if inserted { change.set_file_resolver(Arc::new(self.path_map.clone())) diff --git a/crates/ra_syntax/src/algo/mod.rs b/crates/ra_syntax/src/algo/mod.rs index f92529d3e..faf5a6211 100644 --- a/crates/ra_syntax/src/algo/mod.rs +++ b/crates/ra_syntax/src/algo/mod.rs @@ -1,10 +1,7 @@ pub mod visit; // pub mod walk; -use crate::{ - text_utils::{contains_offset_nonstrict}, - SyntaxNodeRef, TextRange, TextUnit, -}; +use crate::{text_utils::contains_offset_nonstrict, SyntaxNodeRef, TextRange, TextUnit}; pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset { let range = node.range(); diff --git a/crates/ra_syntax/src/ast/mod.rs b/crates/ra_syntax/src/ast/mod.rs index 3aa11b9dd..688ffff47 100644 --- a/crates/ra_syntax/src/ast/mod.rs +++ b/crates/ra_syntax/src/ast/mod.rs @@ -66,7 +66,9 @@ pub trait AttrsOwner<'a>: AstNode<'a> { } pub trait DocCommentsOwner<'a>: AstNode<'a> { - fn doc_comments(self) -> AstChildren<'a, Comment<'a>> { children(self) } + fn doc_comments(self) -> AstChildren<'a, Comment<'a>> { + children(self) + } /// Returns the textual content of a doc comment block as a single string. 
/// That is, strips leading `///` and joins lines @@ -74,12 +76,15 @@ pub trait DocCommentsOwner<'a>: AstNode<'a> { self.doc_comments() .map(|comment| { let prefix = comment.prefix(); - let trimmed = comment.text().as_str() + let trimmed = comment + .text() + .as_str() .trim() .trim_start_matches(prefix) .trim_start(); trimmed.to_owned() - }).join("\n") + }) + .join("\n") } } @@ -250,7 +255,6 @@ impl<'a> IfExpr<'a> { } } - #[derive(Debug, Clone, Copy)] pub enum PathSegmentKind<'a> { Name(NameRef<'a>), @@ -261,7 +265,9 @@ pub enum PathSegmentKind<'a> { impl<'a> PathSegment<'a> { pub fn parent_path(self) -> Path<'a> { - self.syntax().parent().and_then(Path::cast) + self.syntax() + .parent() + .and_then(Path::cast) .expect("segments are always nested in paths") } diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 3698eccd7..79394fd53 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -52,7 +52,9 @@ pub use crate::{ reparsing::AtomEdit, rowan::{SmolStr, TextRange, TextUnit}, syntax_kinds::SyntaxKind, - yellow::{Direction, OwnedRoot, RefRoot, SyntaxError, SyntaxNode, SyntaxNodeRef, TreeRoot, WalkEvent}, + yellow::{ + Direction, OwnedRoot, RefRoot, SyntaxError, SyntaxNode, SyntaxNodeRef, TreeRoot, WalkEvent, + }, }; use crate::yellow::GreenNode; diff --git a/crates/ra_syntax/src/utils.rs b/crates/ra_syntax/src/utils.rs index 8ee02724d..00f00139a 100644 --- a/crates/ra_syntax/src/utils.rs +++ b/crates/ra_syntax/src/utils.rs @@ -1,6 +1,4 @@ -use crate::{ - File, SyntaxKind, SyntaxNodeRef, WalkEvent -}; +use crate::{File, SyntaxKind, SyntaxNodeRef, WalkEvent}; use std::fmt::Write; /// Parse a file and create a string representation of the resulting parse tree. diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index 8980f077f..d9fbb9b5b 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs @@ -89,7 +89,6 @@ pub fn add_cursor(text: &str, offset: TextUnit) -> String { res } - #[derive(Debug)] pub struct FixtureEntry { pub meta: String, @@ -112,25 +111,29 @@ pub fn parse_fixture(fixture: &str) -> Vec { macro_rules! 
flush { () => { if let Some(meta) = meta { - res.push(FixtureEntry { meta: meta.to_string(), text: buf.clone() }); + res.push(FixtureEntry { + meta: meta.to_string(), + text: buf.clone(), + }); buf.clear(); } }; }; - let margin = fixture.lines() + let margin = fixture + .lines() .filter(|it| it.trim_start().starts_with("//-")) .map(|it| it.len() - it.trim_start().len()) - .next().expect("empty fixture"); - let lines = fixture.lines() - .filter_map(|line| { - if line.len() >= margin { - assert!(line[..margin].trim().is_empty()); - Some(&line[margin..]) - } else { - assert!(line.trim().is_empty()); - None - } - }); + .next() + .expect("empty fixture"); + let lines = fixture.lines().filter_map(|line| { + if line.len() >= margin { + assert!(line[..margin].trim().is_empty()); + Some(&line[margin..]) + } else { + assert!(line.trim().is_empty()); + None + } + }); for line in lines { if line.starts_with("//-") { diff --git a/crates/tools/src/lib.rs b/crates/tools/src/lib.rs index 29c46c7c4..3387d0620 100644 --- a/crates/tools/src/lib.rs +++ b/crates/tools/src/lib.rs @@ -1,5 +1,5 @@ -extern crate itertools; extern crate failure; +extern crate itertools; extern crate teraron; use std::{ @@ -7,10 +7,10 @@ use std::{ process::Command, }; -use itertools::Itertools; use failure::bail; +use itertools::Itertools; -pub use teraron::{Mode, Verify, Overwrite}; +pub use teraron::{Mode, Overwrite, Verify}; pub type Result = ::std::result::Result; @@ -63,16 +63,8 @@ pub fn generate(mode: Mode) -> Result<()> { let grammar = project_root().join(GRAMMAR); let syntax_kinds = project_root().join(SYNTAX_KINDS); let ast = project_root().join(AST); - teraron::generate( - &syntax_kinds, - &grammar, - mode, - )?; - teraron::generate( - &ast, - &grammar, - mode, - )?; + teraron::generate(&syntax_kinds, &grammar, mode)?; + teraron::generate(&ast, &grammar, mode)?; Ok(()) } @@ -101,9 +93,18 @@ pub fn run(cmdline: &str, dir: &str) -> Result<()> { pub fn run_rustfmt(mode: Mode) -> Result<()> { run(&format!("rustup install {}", TOOLCHAIN), ".")?; - run(&format!("rustup component add rustfmt-preview --toolchain {}", TOOLCHAIN), ".")?; + run( + &format!( + "rustup component add rustfmt-preview --toolchain {}", + TOOLCHAIN + ), + ".", + )?; if mode == Verify { - run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; + run( + &format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), + ".", + )?; } else { run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; } diff --git a/crates/tools/src/main.rs b/crates/tools/src/main.rs index 91675bbf0..dc623a464 100644 --- a/crates/tools/src/main.rs +++ b/crates/tools/src/main.rs @@ -1,19 +1,17 @@ extern crate clap; extern crate failure; +extern crate teraron; extern crate tools; extern crate walkdir; -extern crate teraron; use clap::{App, Arg, SubCommand}; +use failure::bail; use std::{ collections::HashMap, fs, path::{Path, PathBuf}, }; -use tools::{ - collect_tests, Result, Test, generate, Mode, Overwrite, Verify, run, run_rustfmt, -}; -use failure::bail; +use tools::{collect_tests, generate, run, run_rustfmt, Mode, Overwrite, Result, Test, Verify}; const GRAMMAR_DIR: &str = "./crates/ra_syntax/src/grammar"; const INLINE_TESTS_DIR: &str = "./crates/ra_syntax/tests/data/parser/inline"; diff --git a/crates/tools/tests/cli.rs b/crates/tools/tests/cli.rs index 8c53a8230..2d238d9ea 100644 --- a/crates/tools/tests/cli.rs +++ b/crates/tools/tests/cli.rs @@ -1,8 +1,6 @@ extern crate tools; -use tools::{ - generate, Verify, run_rustfmt, -}; +use tools::{generate, 
run_rustfmt, Verify};
 
 #[test]
 fn verify_template_generation() {
@@ -14,6 +12,9 @@ fn verify_template_generation() {
 #[test]
 fn check_code_formatting() {
     if let Err(error) = run_rustfmt(Verify) {
-        panic!("{}. Please format the code by running `cargo format`", error);
+        panic!(
+            "{}. Please format the code by running `cargo format`",
+            error
+        );
     }
 }
-- 
cgit v1.2.3