From c3a4c4429de83450654795534e64e878a774a088 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 18 Feb 2020 18:35:10 +0100 Subject: Refactor primary IDE API This introduces the new type -- Semantics. Semantics maps SyntaxNodes to various semantic info, such as type, name resolution or macro expansions. To do so, Semantics maintains a HashMap which maps every node it saw to the file from which the node originated. This is enough to get all the necessary hir bits just from syntax. --- crates/ra_ide/src/call_hierarchy.rs | 42 +++---- crates/ra_ide/src/call_info.rs | 27 +++-- crates/ra_ide/src/completion.rs | 4 +- crates/ra_ide/src/completion/complete_dot.rs | 4 +- .../completion/complete_macro_in_item_position.rs | 2 +- crates/ra_ide/src/completion/complete_path.rs | 4 +- crates/ra_ide/src/completion/complete_pattern.rs | 2 +- crates/ra_ide/src/completion/complete_postfix.rs | 2 +- .../src/completion/complete_record_literal.rs | 5 +- .../src/completion/complete_record_pattern.rs | 5 +- crates/ra_ide/src/completion/complete_scope.rs | 4 +- .../ra_ide/src/completion/complete_trait_impl.rs | 33 +++--- crates/ra_ide/src/completion/completion_context.rs | 65 ++++++----- crates/ra_ide/src/diagnostics.rs | 9 +- crates/ra_ide/src/display/navigation_target.rs | 8 +- crates/ra_ide/src/expand.rs | 102 ---------------- crates/ra_ide/src/expand_macro.rs | 29 ++--- crates/ra_ide/src/extend_selection.rs | 51 ++++---- crates/ra_ide/src/goto_definition.rs | 39 +++---- crates/ra_ide/src/goto_type_definition.rs | 36 +++--- crates/ra_ide/src/hover.rs | 45 ++++--- crates/ra_ide/src/impls.rs | 49 +++----- crates/ra_ide/src/inlay_hints.rs | 48 ++++---- crates/ra_ide/src/lib.rs | 5 +- crates/ra_ide/src/marks.rs | 1 + crates/ra_ide/src/parent_module.rs | 17 +-- crates/ra_ide/src/references.rs | 130 +++++++++++---------- crates/ra_ide/src/references/classify.rs | 30 +++-- crates/ra_ide/src/references/rename.rs | 28 ++--- crates/ra_ide/src/runnables.rs | 50 +++----- 
.../ra_ide/src/snapshots/rainbow_highlighting.html | 12 +- crates/ra_ide/src/syntax_highlighting.rs | 87 +++++++------- 32 files changed, 414 insertions(+), 561 deletions(-) delete mode 100644 crates/ra_ide/src/expand.rs (limited to 'crates/ra_ide/src') diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs index 51ac59a71..b00b6d431 100644 --- a/crates/ra_ide/src/call_hierarchy.rs +++ b/crates/ra_ide/src/call_hierarchy.rs @@ -2,13 +2,13 @@ use indexmap::IndexMap; -use hir::db::AstDatabase; +use hir::Semantics; use ra_ide_db::RootDatabase; use ra_syntax::{ast, match_ast, AstNode, TextRange}; use crate::{ - call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition, - references, FilePosition, NavigationTarget, RangeInfo, + call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition, + NavigationTarget, RangeInfo, }; #[derive(Debug, Clone)] @@ -38,30 +38,31 @@ pub(crate) fn call_hierarchy( } pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); // 1. Find all refs // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. // 3. Add ranges relative to the start of the fndef. let refs = references::find_all_refs(db, position, None)?; let mut calls = CallLocations::default(); - let mut sb = hir::SourceBinder::new(db); for reference in refs.info.references() { let file_id = reference.file_range.file_id; - let file = db.parse_or_expand(file_id.into())?; + let file = sema.parse(file_id); + let file = file.syntax(); let token = file.token_at_offset(reference.file_range.range.start()).next()?; - let token = descend_into_macros(db, file_id, token); - let syntax = token.value.parent(); + let token = sema.descend_into_macros(token); + let syntax = token.parent(); // This target is the containing function if let Some(nav) = syntax.ancestors().find_map(|node| { match_ast! 
{ match node { ast::FnDef(it) => { - let def = sb.to_def(token.with_value(it))?; - Some(def.to_nav(sb.db)) + let def = sema.to_def(&it)?; + Some(def.to_nav(sema.db)) }, - _ => { None }, + _ => None, } } }) { @@ -74,11 +75,13 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio } pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); let file_id = position.file_id; - let file = db.parse_or_expand(file_id.into())?; + let file = sema.parse(file_id); + let file = file.syntax(); let token = file.token_at_offset(position.offset).next()?; - let token = descend_into_macros(db, file_id, token); - let syntax = token.value.parent(); + let token = sema.descend_into_macros(token); + let syntax = token.parent(); let mut calls = CallLocations::default(); @@ -87,14 +90,11 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio .filter_map(|node| FnCallNode::with_node_exact(&node)) .filter_map(|call_node| { let name_ref = call_node.name_ref()?; - let name_ref = token.with_value(name_ref.syntax()); - - let analyzer = hir::SourceAnalyzer::new(db, name_ref, None); if let Some(func_target) = match &call_node { FnCallNode::CallExpr(expr) => { //FIXME: Type::as_callable is broken - let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; + let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?; match callable_def { hir::CallableDef::FunctionId(it) => { let fn_def: hir::Function = it.into(); @@ -105,15 +105,15 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio } } FnCallNode::MethodCallExpr(expr) => { - let function = analyzer.resolve_method_call(&expr)?; + let function = sema.resolve_method_call(&expr)?; Some(function.to_nav(db)) } - FnCallNode::MacroCallExpr(expr) => { - let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; + FnCallNode::MacroCallExpr(macro_call) => { + let macro_def 
= sema.resolve_macro_call(&macro_call)?; Some(macro_def.to_nav(db)) } } { - Some((func_target, name_ref.value.text_range())) + Some((func_target, name_ref.syntax().text_range())) } else { None } diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs index 7c6322cb4..9a1fc0d35 100644 --- a/crates/ra_ide/src/call_info.rs +++ b/crates/ra_ide/src/call_info.rs @@ -1,5 +1,5 @@ //! FIXME: write short doc here -use hir::db::AstDatabase; +use hir::Semantics; use ra_ide_db::RootDatabase; use ra_syntax::{ ast::{self, ArgListOwner}, @@ -7,24 +7,23 @@ use ra_syntax::{ }; use test_utils::tested_by; -use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature}; +use crate::{CallInfo, FilePosition, FunctionSignature}; /// Computes parameter information for the given call expression. pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { - let file = db.parse_or_expand(position.file_id.into())?; + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let file = file.syntax(); let token = file.token_at_offset(position.offset).next()?; - let token = descend_into_macros(db, position.file_id, token); + let token = sema.descend_into_macros(token); // Find the calling expression and it's NameRef - let calling_node = FnCallNode::with_node(&token.value.parent())?; - let name_ref = calling_node.name_ref()?; - let name_ref = token.with_value(name_ref.syntax()); + let calling_node = FnCallNode::with_node(&token.parent())?; - let analyzer = hir::SourceAnalyzer::new(db, name_ref, None); let (mut call_info, has_self) = match &calling_node { - FnCallNode::CallExpr(expr) => { + FnCallNode::CallExpr(call) => { //FIXME: Type::as_callable is broken - let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; + let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?; match callable_def { hir::CallableDef::FunctionId(it) => { let fn_def = it.into(); @@ -36,12 +35,12 @@ pub(crate) fn 
call_info(db: &RootDatabase, position: FilePosition) -> Option { - let function = analyzer.resolve_method_call(&expr)?; + FnCallNode::MethodCallExpr(method_call) => { + let function = sema.resolve_method_call(&method_call)?; (CallInfo::with_fn(db, function), function.has_self_param(db)) } - FnCallNode::MacroCallExpr(expr) => { - let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; + FnCallNode::MacroCallExpr(macro_call) => { + let macro_def = sema.resolve_macro_call(&macro_call)?; (CallInfo::with_macro(db, macro_def)?, false) } }; diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs index 4bdc6ba23..c378c2c62 100644 --- a/crates/ra_ide/src/completion.rs +++ b/crates/ra_ide/src/completion.rs @@ -17,7 +17,6 @@ mod complete_postfix; mod complete_macro_in_item_position; mod complete_trait_impl; -use ra_db::SourceDatabase; use ra_ide_db::RootDatabase; #[cfg(test)] @@ -57,8 +56,7 @@ pub use crate::completion::completion_item::{ /// identifier prefix/fuzzy match should be done higher in the stack, together /// with ordering of completions (currently this is done by the client). 
pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option { - let original_parse = db.parse(position.file_id); - let ctx = CompletionContext::new(db, &original_parse, position)?; + let ctx = CompletionContext::new(db, position)?; let mut acc = Completions::default(); diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs index 2ca78c927..a6e0158b2 100644 --- a/crates/ra_ide/src/completion/complete_dot.rs +++ b/crates/ra_ide/src/completion/complete_dot.rs @@ -16,7 +16,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { _ => return, }; - let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { + let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { Some(ty) => ty, _ => return, }; @@ -55,7 +55,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { if let Some(krate) = ctx.module.map(|it| it.krate()) { let mut seen_methods = FxHashSet::default(); - let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db); + let traits_in_scope = ctx.scope().traits_in_scope(); receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) { acc.add_function(ctx, func); diff --git a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs index faadd1e3f..1866d9e6c 100644 --- a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs +++ b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs @@ -5,7 +5,7 @@ use crate::completion::{CompletionContext, Completions}; pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { // Show only macros in top level. 
if ctx.is_new_item { - ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { + ctx.scope().process_all_names(&mut |name, res| { if let hir::ScopeDef::MacroDef(mac) = res { acc.add_macro(ctx, Some(name.to_string()), mac); } diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs index 2d7f09a6c..c626e90cc 100644 --- a/crates/ra_ide/src/completion/complete_path.rs +++ b/crates/ra_ide/src/completion/complete_path.rs @@ -11,7 +11,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { Some(path) => path.clone(), _ => return, }; - let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) { + let def = match ctx.scope().resolve_hir_path(&path) { Some(PathResolution::Def(def)) => def, _ => return, }; @@ -49,7 +49,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { // FIXME: complete T::AssocType let krate = ctx.module.map(|m| m.krate()); if let Some(krate) = krate { - let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db); + let traits_in_scope = ctx.scope().traits_in_scope(); ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { match item { hir::AssocItem::Function(func) => { diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs index fd03b1c40..c2c6ca002 100644 --- a/crates/ra_ide/src/completion/complete_pattern.rs +++ b/crates/ra_ide/src/completion/complete_pattern.rs @@ -9,7 +9,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { } // FIXME: ideally, we should look at the type we are matching against and // suggest variants + auto-imports - ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { + ctx.scope().process_all_names(&mut |name, res| { let def = match &res { hir::ScopeDef::ModuleDef(def) => def, _ => return, diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs 
index 5470dc291..8a74f993a 100644 --- a/crates/ra_ide/src/completion/complete_postfix.rs +++ b/crates/ra_ide/src/completion/complete_postfix.rs @@ -29,7 +29,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { dot_receiver.syntax().text().to_string() }; - let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { + let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { Some(it) => it, None => return, }; diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs index 577c394d2..f98353d76 100644 --- a/crates/ra_ide/src/completion/complete_record_literal.rs +++ b/crates/ra_ide/src/completion/complete_record_literal.rs @@ -5,10 +5,7 @@ use crate::completion::{CompletionContext, Completions}; /// Complete fields in fields literals. pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| { - Some(( - ctx.analyzer.type_of(ctx.db, &it.clone().into())?, - ctx.analyzer.resolve_record_literal(it)?, - )) + Some((ctx.sema.type_of_expr(&it.clone().into())?, ctx.sema.resolve_record_literal(it)?)) }) { Some(it) => it, _ => return, diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs index a56c7e3a1..9bdeae49f 100644 --- a/crates/ra_ide/src/completion/complete_record_pattern.rs +++ b/crates/ra_ide/src/completion/complete_record_pattern.rs @@ -4,10 +4,7 @@ use crate::completion::{CompletionContext, Completions}; pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) { let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| { - Some(( - ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?, - ctx.analyzer.resolve_record_pattern(it)?, - )) + Some((ctx.sema.type_of_pat(&it.clone().into())?, ctx.sema.resolve_record_pattern(it)?)) }) { Some(it) => it, _ 
=> return, diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs index e2ee86dd1..aad016d4a 100644 --- a/crates/ra_ide/src/completion/complete_scope.rs +++ b/crates/ra_ide/src/completion/complete_scope.rs @@ -7,9 +7,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { return; } - ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { - acc.add_resolution(ctx, name.to_string(), &res) - }); + ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res)); } #[cfg(test)] diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs index 83628e35c..9a27c164b 100644 --- a/crates/ra_ide/src/completion/complete_trait_impl.rs +++ b/crates/ra_ide/src/completion/complete_trait_impl.rs @@ -64,11 +64,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) { match trigger.kind() { SyntaxKind::FN_DEF => { - for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) - .iter() - .filter_map(|item| match item { - hir::AssocItem::Function(fn_item) => Some(fn_item), - _ => None, + for missing_fn in + get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| { + match item { + hir::AssocItem::Function(fn_item) => Some(fn_item), + _ => None, + } }) { add_function_impl(&trigger, acc, ctx, &missing_fn); @@ -76,11 +77,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext } SyntaxKind::TYPE_ALIAS_DEF => { - for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) - .iter() - .filter_map(|item| match item { - hir::AssocItem::TypeAlias(type_item) => Some(type_item), - _ => None, + for missing_fn in + get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| { + match item { + hir::AssocItem::TypeAlias(type_item) => 
Some(type_item), + _ => None, + } }) { add_type_alias_impl(&trigger, acc, ctx, &missing_fn); @@ -88,11 +90,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext } SyntaxKind::CONST_DEF => { - for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) - .iter() - .filter_map(|item| match item { - hir::AssocItem::Const(const_item) => Some(const_item), - _ => None, + for missing_fn in + get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| { + match item { + hir::AssocItem::Const(const_item) => Some(const_item), + _ => None, + } }) { add_const_impl(&trigger, acc, ctx, &missing_fn); diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs index 8678a3234..81321a897 100644 --- a/crates/ra_ide/src/completion/completion_context.rs +++ b/crates/ra_ide/src/completion/completion_context.rs @@ -1,9 +1,11 @@ //! FIXME: write short doc here +use hir::{Semantics, SemanticsScope}; +use ra_db::SourceDatabase; use ra_ide_db::RootDatabase; use ra_syntax::{ algo::{find_covering_element, find_node_at_offset}, - ast, AstNode, Parse, SourceFile, + ast, AstNode, SourceFile, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextUnit, }; @@ -15,8 +17,8 @@ use crate::FilePosition; /// exactly is the cursor, syntax-wise. 
#[derive(Debug)] pub(crate) struct CompletionContext<'a> { + pub(super) sema: Semantics<'a, RootDatabase>, pub(super) db: &'a RootDatabase, - pub(super) analyzer: hir::SourceAnalyzer, pub(super) offset: TextUnit, pub(super) token: SyntaxToken, pub(super) module: Option, @@ -51,20 +53,26 @@ pub(crate) struct CompletionContext<'a> { impl<'a> CompletionContext<'a> { pub(super) fn new( db: &'a RootDatabase, - original_parse: &'a Parse, position: FilePosition, ) -> Option> { - let mut sb = hir::SourceBinder::new(db); - let module = sb.to_module_def(position.file_id); - let token = - original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?; - let analyzer = sb.analyze( - hir::InFile::new(position.file_id.into(), &token.parent()), - Some(position.offset), - ); + let sema = Semantics::new(db); + + let original_file = sema.parse(position.file_id); + + // Insert a fake ident to get a valid parse tree. We will use this file + // to determine context, though the original_file will be used for + // actual completion. + let file_with_fake_ident = { + let parse = db.parse(position.file_id); + let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string()); + parse.reparse(&edit).tree() + }; + + let module = sema.to_module_def(position.file_id); + let token = original_file.syntax().token_at_offset(position.offset).left_biased()?; let mut ctx = CompletionContext { + sema, db, - analyzer, token, offset: position.offset, module, @@ -87,7 +95,7 @@ impl<'a> CompletionContext<'a> { has_type_args: false, dot_receiver_is_ambiguous_float_literal: false, }; - ctx.fill(&original_parse, position.offset); + ctx.fill(&original_file, file_with_fake_ident, position.offset); Some(ctx) } @@ -100,29 +108,33 @@ impl<'a> CompletionContext<'a> { } } - fn fill(&mut self, original_parse: &'a Parse, offset: TextUnit) { - // Insert a fake ident to get a valid parse tree. 
We will use this file - // to determine context, though the original_file will be used for - // actual completion. - let file = { - let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); - original_parse.reparse(&edit).tree() - }; + pub(crate) fn scope(&self) -> SemanticsScope<'_, RootDatabase> { + self.sema.scope_at_offset(&self.token.parent(), self.offset) + } + fn fill( + &mut self, + original_file: &ast::SourceFile, + file_with_fake_ident: ast::SourceFile, + offset: TextUnit, + ) { // First, let's try to complete a reference to some declaration. - if let Some(name_ref) = find_node_at_offset::(file.syntax(), offset) { + if let Some(name_ref) = + find_node_at_offset::(file_with_fake_ident.syntax(), offset) + { // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. // See RFC#1685. if is_node::(name_ref.syntax()) { self.is_param = true; return; } - self.classify_name_ref(original_parse.tree(), name_ref); + self.classify_name_ref(original_file, name_ref); } // Otherwise, see if this is a declaration. We can use heuristics to // suggest declaration names, see `CompletionKind::Magic`. 
- if let Some(name) = find_node_at_offset::(file.syntax(), offset) { + if let Some(name) = find_node_at_offset::(file_with_fake_ident.syntax(), offset) + { if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { let parent = bind_pat.syntax().parent(); if parent.clone().and_then(ast::MatchArm::cast).is_some() @@ -136,13 +148,12 @@ impl<'a> CompletionContext<'a> { return; } if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { - self.record_lit_pat = - find_node_at_offset(original_parse.tree().syntax(), self.offset); + self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset); } } } - fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) { + fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) { self.name_ref_syntax = find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); let name_range = name_ref.syntax().text_range(); diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs index 9cf86b26d..a52f7fdd9 100644 --- a/crates/ra_ide/src/diagnostics.rs +++ b/crates/ra_ide/src/diagnostics.rs @@ -2,7 +2,10 @@ use std::cell::RefCell; -use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; +use hir::{ + diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}, + Semantics, +}; use itertools::Itertools; use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; use ra_ide_db::RootDatabase; @@ -24,7 +27,7 @@ pub enum Severity { pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec { let _p = profile("diagnostics"); - let mut sb = hir::SourceBinder::new(db); + let sema = Semantics::new(db); let parse = db.parse(file_id); let mut res = Vec::new(); @@ -110,7 +113,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec fix: Some(fix), }) }); - if let Some(m) = sb.to_module_def(file_id) { + if let Some(m) = sema.to_module_def(file_id) { 
m.diagnostics(db, &mut sink); }; drop(sink); diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs index c9d0058a6..5afb23764 100644 --- a/crates/ra_ide/src/display/navigation_target.rs +++ b/crates/ra_ide/src/display/navigation_target.rs @@ -1,7 +1,7 @@ //! FIXME: write short doc here use either::Either; -use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; +use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource}; use ra_db::{FileId, SourceDatabase}; use ra_ide_db::RootDatabase; use ra_syntax::{ @@ -11,7 +11,11 @@ use ra_syntax::{ TextRange, }; -use crate::{expand::original_range, references::NameDefinition, FileSymbol}; +use crate::{ + // expand::original_range, + references::NameDefinition, + FileSymbol, +}; use super::short_label::ShortLabel; diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs deleted file mode 100644 index 9f3aaa3a3..000000000 --- a/crates/ra_ide/src/expand.rs +++ /dev/null @@ -1,102 +0,0 @@ -//! Utilities to work with files, produced by macros. 
-use std::iter::successors; - -use hir::{InFile, Origin}; -use ra_db::FileId; -use ra_ide_db::RootDatabase; -use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange}; - -use crate::FileRange; - -pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange { - if let Some((range, Origin::Call)) = original_range_and_origin(db, node) { - return range; - } - - if let Some(expansion) = node.file_id.expansion_info(db) { - if let Some(call_node) = expansion.call_node() { - return FileRange { - file_id: call_node.file_id.original_file(db), - range: call_node.value.text_range(), - }; - } - } - - FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() } -} - -fn original_range_and_origin( - db: &RootDatabase, - node: InFile<&SyntaxNode>, -) -> Option<(FileRange, Origin)> { - let expansion = node.file_id.expansion_info(db)?; - - // the input node has only one token ? - let single = node.value.first_token()? == node.value.last_token()?; - - // FIXME: We should handle recurside macro expansions - let (range, origin) = node.value.descendants().find_map(|it| { - let first = it.first_token()?; - let last = it.last_token()?; - - if !single && first == last { - return None; - } - - // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens - let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?; - let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?; - - if first.file_id != last.file_id || first_origin != last_origin { - return None; - } - - // FIXME: Add union method in TextRange - Some(( - first.with_value(union_range(first.value.text_range(), last.value.text_range())), - first_origin, - )) - })?; - - return Some(( - FileRange { file_id: range.file_id.original_file(db), range: range.value }, - origin, - )); - - fn union_range(a: TextRange, b: TextRange) -> TextRange { - let start = a.start().min(b.start()); - let end = 
a.end().max(b.end()); - TextRange::from_to(start, end) } } -pub(crate) fn descend_into_macros( - db: &RootDatabase, - file_id: FileId, - token: SyntaxToken, -) -> InFile { - let src = InFile::new(file_id.into(), token); - - let source_analyzer = - hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None); - - descend_into_macros_with_analyzer(db, &source_analyzer, src) -} - -pub(crate) fn descend_into_macros_with_analyzer( - db: &RootDatabase, - source_analyzer: &hir::SourceAnalyzer, - src: InFile, -) -> InFile { - successors(Some(src), |token| { - let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; - let tt = macro_call.token_tree()?; - if !token.value.text_range().is_subrange(&tt.syntax().text_range()) { - return None; - } - let exp = source_analyzer.expand(db, token.with_value(&macro_call))?; - exp.map_token_down(db, token.as_ref()) - }) - .last() - .unwrap() -} diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs index af2783bef..f2814e684 100644 --- a/crates/ra_ide/src/expand_macro.rs +++ b/crates/ra_ide/src/expand_macro.rs @@ -1,7 +1,6 @@ //! 
This modules implements "expand macro" functionality in the IDE -use hir::db::AstDatabase; -use ra_db::SourceDatabase; +use hir::Semantics; use ra_ide_db::RootDatabase; use ra_syntax::{ algo::{find_node_at_offset, replace_descendants}, @@ -17,13 +16,12 @@ pub struct ExpandedMacro { } pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option { - let parse = db.parse(position.file_id); - let file = parse.tree(); + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); let name_ref = find_node_at_offset::(file.syntax(), position.offset)?; let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; - let source = hir::InFile::new(position.file_id.into(), mac.syntax()); - let expanded = expand_macro_recur(db, source, source.with_value(&mac))?; + let expanded = expand_macro_recur(&sema, &mac)?; // FIXME: // macro expansion may lose all white space information @@ -33,21 +31,16 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< } fn expand_macro_recur( - db: &RootDatabase, - source: hir::InFile<&SyntaxNode>, - macro_call: hir::InFile<&ast::MacroCall>, + sema: &Semantics, + macro_call: &ast::MacroCall, ) -> Option { - let analyzer = hir::SourceAnalyzer::new(db, source, None); - let expansion = analyzer.expand(db, macro_call)?; - let macro_file_id = expansion.file_id(); - let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?; + let mut expanded = sema.expand(macro_call)?; let children = expanded.descendants().filter_map(ast::MacroCall::cast); let mut replaces: FxHashMap = FxHashMap::default(); for child in children.into_iter() { - let node = hir::InFile::new(macro_file_id, &child); - if let Some(new_node) = expand_macro_recur(db, source, node) { + if let Some(new_node) = expand_macro_recur(sema, &child) { // Replace the whole node if it is root // `replace_descendants` will not replace the parent node // but `SyntaxNode::descendants include itself @@ -120,10 +113,12 @@ fn 
insert_whitespaces(syn: SyntaxNode) -> String { #[cfg(test)] mod tests { - use super::*; - use crate::mock_analysis::analysis_and_position; use insta::assert_snapshot; + use crate::mock_analysis::analysis_and_position; + + use super::*; + fn check_expand_macro(fixture: &str) -> ExpandedMacro { let (analysis, pos) = analysis_and_position(fixture); analysis.expand_macro(pos).unwrap().unwrap() diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 1e7d0621a..86e6f12d7 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs @@ -2,26 +2,26 @@ use std::iter::successors; -use hir::db::AstDatabase; -use ra_db::SourceDatabase; +use hir::Semantics; use ra_ide_db::RootDatabase; use ra_syntax::{ - algo::find_covering_element, + algo::{self, find_covering_element}, ast::{self, AstNode, AstToken}, - Direction, NodeOrToken, SyntaxElement, + Direction, NodeOrToken, SyntaxKind::{self, *}, SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, }; -use crate::{expand::descend_into_macros, FileId, FileRange}; +use crate::FileRange; pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { - let src = db.parse(frange.file_id).tree(); - try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range) + let sema = Semantics::new(db); + let src = sema.parse(frange.file_id); + try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range) } fn try_extend_selection( - db: &RootDatabase, + sema: &Semantics, root: &SyntaxNode, frange: FileRange, ) -> Option { @@ -86,7 +86,7 @@ fn try_extend_selection( // if we are in single token_tree, we maybe live in macro or attr if node.kind() == TOKEN_TREE { if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { - if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) { + if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { return Some(range); } } @@ 
-96,7 +96,7 @@ fn try_extend_selection( return Some(node.text_range()); } - let node = shallowest_node(&node.into()).unwrap(); + let node = shallowest_node(&node.into()); if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { if let Some(range) = extend_list_item(&node) { @@ -108,8 +108,7 @@ fn try_extend_selection( } fn extend_tokens_from_range( - db: &RootDatabase, - file_id: FileId, + sema: &Semantics, macro_call: ast::MacroCall, original_range: TextRange, ) -> Option { @@ -130,25 +129,21 @@ fn extend_tokens_from_range( } // compute original mapped token range - let expanded = { - let first_node = descend_into_macros(db, file_id, first_token.clone()); - let first_node = first_node.map(|it| it.text_range()); - - let last_node = descend_into_macros(db, file_id, last_token.clone()); - if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id { - return None; + let extended = { + let fst_expanded = sema.descend_into_macros(first_token.clone()); + let lst_expanded = sema.descend_into_macros(last_token.clone()); + let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?; + lca = shallowest_node(&lca); + if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) { + lca = lca.parent()?; } - first_node.map(|it| union_range(it, last_node.value.text_range())) + lca }; // Compute parent node range - let src = db.parse_or_expand(expanded.file_id)?; - let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?; - let validate = |token: &SyntaxToken| { - let node = descend_into_macros(db, file_id, token.clone()); - node.file_id == expanded.file_id - && node.value.text_range().is_subrange(&parent.text_range()) + let expanded = sema.descend_into_macros(token.clone()); + algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended) }; // Find the first and last text range under expanded parent @@ -191,8 +186,8 @@ fn 
union_range(range: TextRange, r: TextRange) -> TextRange { } /// Find the shallowest node with same range, which allows us to traverse siblings. -fn shallowest_node(node: &SyntaxElement) -> Option { - node.ancestors().take_while(|n| n.text_range() == node.text_range()).last() +fn shallowest_node(node: &SyntaxNode) -> SyntaxNode { + node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap() } fn extend_single_word_in_comment_or_string( diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs index feff1ec3f..6053c1bb6 100644 --- a/crates/ra_ide/src/goto_definition.rs +++ b/crates/ra_ide/src/goto_definition.rs @@ -1,7 +1,7 @@ //! FIXME: write short doc here -use hir::{db::AstDatabase, InFile, SourceBinder}; -use ra_ide_db::{symbol_index, RootDatabase}; +use hir::Semantics; +use ra_ide_db::{defs::classify_name, symbol_index, RootDatabase}; use ra_syntax::{ ast::{self}, match_ast, AstNode, @@ -11,8 +11,7 @@ use ra_syntax::{ use crate::{ display::{ToNav, TryToNav}, - expand::descend_into_macros, - references::{classify_name, classify_name_ref}, + references::classify_name_ref, FilePosition, NavigationTarget, RangeInfo, }; @@ -20,18 +19,18 @@ pub(crate) fn goto_definition( db: &RootDatabase, position: FilePosition, ) -> Option>> { - let file = db.parse_or_expand(position.file_id.into())?; + let sema = Semantics::new(db); + let file = sema.parse(position.file_id).syntax().clone(); let original_token = pick_best(file.token_at_offset(position.offset))?; - let token = descend_into_macros(db, position.file_id, original_token.clone()); + let token = sema.descend_into_macros(original_token.clone()); - let mut sb = SourceBinder::new(db); let nav_targets = match_ast! 
{ - match (token.value.parent()) { + match (token.parent()) { ast::NameRef(name_ref) => { - reference_definition(&mut sb, token.with_value(&name_ref)).to_vec() + reference_definition(&sema, &name_ref).to_vec() }, ast::Name(name) => { - name_definition(&mut sb, token.with_value(&name))? + name_definition(&sema, &name)? }, _ => return None, } @@ -68,33 +67,33 @@ impl ReferenceResult { } pub(crate) fn reference_definition( - sb: &mut SourceBinder, - name_ref: InFile<&ast::NameRef>, + sema: &Semantics, + name_ref: &ast::NameRef, ) -> ReferenceResult { use self::ReferenceResult::*; - let name_kind = classify_name_ref(sb, name_ref); + let name_kind = classify_name_ref(sema, name_ref); if let Some(def) = name_kind { - return match def.try_to_nav(sb.db) { + return match def.try_to_nav(sema.db) { Some(nav) => ReferenceResult::Exact(nav), None => ReferenceResult::Approximate(Vec::new()), }; } // Fallback index based approach: - let navs = symbol_index::index_resolve(sb.db, name_ref.value) + let navs = symbol_index::index_resolve(sema.db, name_ref) .into_iter() - .map(|s| s.to_nav(sb.db)) + .map(|s| s.to_nav(sema.db)) .collect(); Approximate(navs) } fn name_definition( - sb: &mut SourceBinder, - name: InFile<&ast::Name>, + sema: &Semantics, + name: &ast::Name, ) -> Option> { - let def = classify_name(sb, name)?; - let nav = def.try_to_nav(sb.db)?; + let def = classify_name(sema, name)?; + let nav = def.try_to_nav(sema.db)?; Some(vec![nav]) } diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs index 69940fc36..869a4708b 100644 --- a/crates/ra_ide/src/goto_type_definition.rs +++ b/crates/ra_ide/src/goto_type_definition.rs @@ -1,31 +1,31 @@ //! 
FIXME: write short doc here -use hir::db::AstDatabase; use ra_ide_db::RootDatabase; -use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; +use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; -use crate::{ - display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo, -}; +use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; pub(crate) fn goto_type_definition( db: &RootDatabase, position: FilePosition, ) -> Option>> { - let file = db.parse_or_expand(position.file_id.into())?; - let token = pick_best(file.token_at_offset(position.offset))?; - let token = descend_into_macros(db, position.file_id, token); - - let node = token - .value - .ancestors() - .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; - - let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None); + let sema = hir::Semantics::new(db); + + let file: ast::SourceFile = sema.parse(position.file_id); + let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?; + let token: SyntaxToken = sema.descend_into_macros(token); + + let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| { + let ty = match_ast! { + match node { + ast::Expr(expr) => { sema.type_of_expr(&expr)? }, + ast::Pat(pat) => { sema.type_of_pat(&pat)? }, + _ => { return None }, + } + }; - let ty: hir::Type = ast::Expr::cast(node.clone()) - .and_then(|e| analyzer.type_of(db, &e)) - .or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?; + Some((ty, node)) + })?; let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs index 1c6ca36df..ace33c079 100644 --- a/crates/ra_ide/src/hover.rs +++ b/crates/ra_ide/src/hover.rs @@ -1,8 +1,10 @@ //! 
FIXME: write short doc here -use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder}; -use ra_db::SourceDatabase; -use ra_ide_db::{defs::NameDefinition, RootDatabase}; +use hir::{Adt, HasSource, HirDisplay, Semantics}; +use ra_ide_db::{ + defs::{classify_name, NameDefinition}, + RootDatabase, +}; use ra_syntax::{ algo::find_covering_element, ast::{self, DocCommentsOwner}, @@ -13,8 +15,7 @@ use ra_syntax::{ use crate::{ display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, - expand::{descend_into_macros, original_range}, - references::{classify_name, classify_name_ref}, + references::classify_name_ref, FilePosition, FileRange, RangeInfo, }; @@ -143,25 +144,25 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: NameDefinition) -> Option Option> { - let file = db.parse_or_expand(position.file_id.into())?; + let sema = Semantics::new(db); + let file = sema.parse(position.file_id).syntax().clone(); let token = pick_best(file.token_at_offset(position.offset))?; - let token = descend_into_macros(db, position.file_id, token); + let token = sema.descend_into_macros(token); let mut res = HoverResult::new(); - let mut sb = SourceBinder::new(db); if let Some((node, name_kind)) = match_ast! 
{ - match (token.value.parent()) { + match (token.parent()) { ast::NameRef(name_ref) => { - classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().clone(), d)) + classify_name_ref(&sema, &name_ref).map(|d| (name_ref.syntax().clone(), d)) }, ast::Name(name) => { - classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().clone(), d)) + classify_name(&sema, &name).map(|d| (name.syntax().clone(), d)) }, _ => None, } } { - let range = original_range(db, token.with_value(&node)).range; + let range = sema.original_range(&node).range; res.extend(hover_text_from_name_kind(db, name_kind)); if !res.is_empty() { @@ -170,11 +171,10 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option) -> Option { } pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option { - let parse = db.parse(frange.file_id); - let leaf_node = find_covering_element(parse.tree().syntax(), frange.range); + let sema = Semantics::new(db); + let source_file = sema.parse(frange.file_id); + let leaf_node = find_covering_element(source_file.syntax(), frange.range); // if we picked identifier, expand to pattern/expression let node = leaf_node .ancestors() .take_while(|it| it.text_range() == leaf_node.text_range()) .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; - let analyzer = - hir::SourceAnalyzer::new(db, hir::InFile::new(frange.file_id.into(), &node), None); - let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) - { + let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| sema.type_of_expr(&e)) { ty - } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) { + } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| sema.type_of_pat(&p)) { ty } else { return None; @@ -219,11 +217,12 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option { #[cfg(test)] mod tests { + use 
ra_db::FileLoader; + use ra_syntax::TextRange; + use crate::mock_analysis::{ analysis_and_position, single_file_with_position, single_file_with_range, }; - use ra_db::FileLoader; - use ra_syntax::TextRange; fn trim_markup(s: &str) -> &str { s.trim_start_matches("```rust\n").trim_end_matches("\n```") diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs index 64a2dadc8..bf82b2a16 100644 --- a/crates/ra_ide/src/impls.rs +++ b/crates/ra_ide/src/impls.rs @@ -1,7 +1,6 @@ //! FIXME: write short doc here -use hir::{Crate, ImplBlock, SourceBinder}; -use ra_db::SourceDatabase; +use hir::{Crate, ImplBlock, Semantics}; use ra_ide_db::RootDatabase; use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; @@ -11,21 +10,21 @@ pub(crate) fn goto_implementation( db: &RootDatabase, position: FilePosition, ) -> Option>> { - let parse = db.parse(position.file_id); - let syntax = parse.tree().syntax().clone(); - let mut sb = SourceBinder::new(db); + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); + let syntax = source_file.syntax().clone(); - let krate = sb.to_module_def(position.file_id)?.krate(); + let krate = sema.to_module_def(position.file_id)?.krate(); if let Some(nominal_def) = find_node_at_offset::(&syntax, position.offset) { return Some(RangeInfo::new( nominal_def.syntax().text_range(), - impls_for_def(&mut sb, position, &nominal_def, krate)?, + impls_for_def(&sema, &nominal_def, krate)?, )); } else if let Some(trait_def) = find_node_at_offset::(&syntax, position.offset) { return Some(RangeInfo::new( trait_def.syntax().text_range(), - impls_for_trait(&mut sb, position, &trait_def, krate)?, + impls_for_trait(&sema, &trait_def, krate)?, )); } @@ -33,49 +32,37 @@ pub(crate) fn goto_implementation( } fn impls_for_def( - sb: &mut SourceBinder, - position: FilePosition, + sema: &Semantics, node: &ast::NominalDef, krate: Crate, ) -> Option> { let ty = match node { - ast::NominalDef::StructDef(def) => { - let src = hir::InFile { 
file_id: position.file_id.into(), value: def.clone() }; - sb.to_def(src)?.ty(sb.db) - } - ast::NominalDef::EnumDef(def) => { - let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; - sb.to_def(src)?.ty(sb.db) - } - ast::NominalDef::UnionDef(def) => { - let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; - sb.to_def(src)?.ty(sb.db) - } + ast::NominalDef::StructDef(def) => sema.to_def(def)?.ty(sema.db), + ast::NominalDef::EnumDef(def) => sema.to_def(def)?.ty(sema.db), + ast::NominalDef::UnionDef(def) => sema.to_def(def)?.ty(sema.db), }; - let impls = ImplBlock::all_in_crate(sb.db, krate); + let impls = ImplBlock::all_in_crate(sema.db, krate); Some( impls .into_iter() - .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sb.db))) - .map(|imp| imp.to_nav(sb.db)) + .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db))) + .map(|imp| imp.to_nav(sema.db)) .collect(), ) } fn impls_for_trait( - sb: &mut SourceBinder, - position: FilePosition, + sema: &Semantics, node: &ast::TraitDef, krate: Crate, ) -> Option> { - let src = hir::InFile { file_id: position.file_id.into(), value: node.clone() }; - let tr = sb.to_def(src)?; + let tr = sema.to_def(node)?; - let impls = ImplBlock::for_trait(sb.db, krate, tr); + let impls = ImplBlock::for_trait(sema.db, krate, tr); - Some(impls.into_iter().map(|imp| imp.to_nav(sb.db)).collect()) + Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect()) } #[cfg(test)] diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs index b42aa1523..35e3f782d 100644 --- a/crates/ra_ide/src/inlay_hints.rs +++ b/crates/ra_ide/src/inlay_hints.rs @@ -1,12 +1,11 @@ //! 
FIXME: write short doc here -use hir::{Adt, HirDisplay, SourceAnalyzer, SourceBinder, Type}; -use once_cell::unsync::Lazy; +use hir::{Adt, HirDisplay, Semantics, Type}; use ra_ide_db::RootDatabase; use ra_prof::profile; use ra_syntax::{ ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, - match_ast, SmolStr, SourceFile, SyntaxNode, TextRange, + match_ast, SmolStr, SyntaxNode, TextRange, }; use crate::{FileId, FunctionSignature}; @@ -27,38 +26,36 @@ pub struct InlayHint { pub(crate) fn inlay_hints( db: &RootDatabase, file_id: FileId, - file: &SourceFile, max_inlay_hint_length: Option, ) -> Vec { - let mut sb = SourceBinder::new(db); + let sema = Semantics::new(db); + let file = sema.parse(file_id); let mut res = Vec::new(); for node in file.syntax().descendants() { - get_inlay_hints(&mut res, &mut sb, file_id, &node, max_inlay_hint_length); + get_inlay_hints(&mut res, &sema, &node, max_inlay_hint_length); } res } fn get_inlay_hints( acc: &mut Vec, - sb: &mut SourceBinder, - file_id: FileId, + sema: &Semantics, node: &SyntaxNode, max_inlay_hint_length: Option, ) -> Option<()> { let _p = profile("get_inlay_hints"); - let db = sb.db; - let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None)); + let db = sema.db; match_ast! 
{ match node { ast::CallExpr(it) => { - get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); + get_param_name_hints(acc, sema, ast::Expr::from(it)); }, ast::MethodCallExpr(it) => { - get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); + get_param_name_hints(acc, sema, ast::Expr::from(it)); }, ast::BindPat(it) => { let pat = ast::Pat::from(it.clone()); - let ty = analyzer.type_of_pat(db, &pat)?; + let ty = sema.type_of_pat(&pat)?; if should_not_display_type_hint(db, &it, &ty) { return None; @@ -125,8 +122,7 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_ fn get_param_name_hints( acc: &mut Vec, - db: &RootDatabase, - analyzer: &SourceAnalyzer, + sema: &Semantics, expr: ast::Expr, ) -> Option<()> { let args = match &expr { @@ -138,7 +134,7 @@ fn get_param_name_hints( // we need args len to determine whether to skip or not the &self parameter .collect::>(); - let fn_signature = get_fn_signature(db, analyzer, &expr)?; + let fn_signature = get_fn_signature(sema, &expr)?; let n_params_to_skip = if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() { 1 @@ -184,28 +180,26 @@ fn should_show_param_hint( true } -fn get_fn_signature( - db: &RootDatabase, - analyzer: &SourceAnalyzer, - expr: &ast::Expr, -) -> Option { +fn get_fn_signature(sema: &Semantics, expr: &ast::Expr) -> Option { match expr { ast::Expr::CallExpr(expr) => { // FIXME: Type::as_callable is broken for closures - let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; + let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?; match callable_def { hir::CallableDef::FunctionId(it) => { - Some(FunctionSignature::from_hir(db, it.into())) + Some(FunctionSignature::from_hir(sema.db, it.into())) + } + hir::CallableDef::StructId(it) => { + FunctionSignature::from_struct(sema.db, it.into()) } - hir::CallableDef::StructId(it) => FunctionSignature::from_struct(db, it.into()), hir::CallableDef::EnumVariantId(it) 
=> { - FunctionSignature::from_enum_variant(db, it.into()) + FunctionSignature::from_enum_variant(sema.db, it.into()) } } } ast::Expr::MethodCallExpr(expr) => { - let fn_def = analyzer.resolve_method_call(&expr)?; - Some(FunctionSignature::from_hir(db, fn_def)) + let fn_def = sema.resolve_method_call(&expr)?; + Some(FunctionSignature::from_hir(sema.db, fn_def)) } _ => None, } diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs index d22870669..f31d3c295 100644 --- a/crates/ra_ide/src/lib.rs +++ b/crates/ra_ide/src/lib.rs @@ -35,7 +35,6 @@ mod typing; mod matching_brace; mod display; mod inlay_hints; -mod expand; mod expand_macro; mod ssr; @@ -319,9 +318,7 @@ impl Analysis { file_id: FileId, max_inlay_hint_length: Option, ) -> Cancelable> { - self.with_db(|db| { - inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length) - }) + self.with_db(|db| inlay_hints::inlay_hints(db, file_id, max_inlay_hint_length)) } /// Returns the set of folding ranges. diff --git a/crates/ra_ide/src/marks.rs b/crates/ra_ide/src/marks.rs index bcb67e373..7b8b727b4 100644 --- a/crates/ra_ide/src/marks.rs +++ b/crates/ra_ide/src/marks.rs @@ -11,4 +11,5 @@ test_utils::marks!( call_info_bad_offset dont_complete_current_use test_resolve_parent_module_on_module_decl + search_filters_by_range ); diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs index af14d6ab3..2c4bdb039 100644 --- a/crates/ra_ide/src/parent_module.rs +++ b/crates/ra_ide/src/parent_module.rs @@ -1,6 +1,7 @@ //! FIXME: write short doc here -use ra_db::{CrateId, FileId, FilePosition, SourceDatabase}; +use hir::Semantics; +use ra_db::{CrateId, FileId, FilePosition}; use ra_ide_db::RootDatabase; use ra_syntax::{ algo::find_node_at_offset, @@ -13,10 +14,10 @@ use crate::NavigationTarget; /// This returns `Vec` because a module may be included from several places. We /// don't handle this case yet though, so the Vec has length at most one. 
pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec { - let mut sb = hir::SourceBinder::new(db); - let parse = db.parse(position.file_id); + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); - let mut module = find_node_at_offset::(parse.tree().syntax(), position.offset); + let mut module = find_node_at_offset::(source_file.syntax(), position.offset); // If cursor is literally on `mod foo`, go to the grandpa. if let Some(m) = &module { @@ -30,8 +31,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec sb.to_def(hir::InFile::new(position.file_id.into(), module)), - None => sb.to_module_def(position.file_id), + Some(module) => sema.to_def(&module), + None => sema.to_module_def(position.file_id), }; let module = match module { None => return Vec::new(), @@ -43,8 +44,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec Vec { - let mut sb = hir::SourceBinder::new(db); - let module = match sb.to_module_def(file_id) { + let sema = Semantics::new(db); + let module = match sema.to_module_def(file_id) { Some(it) => it, None => return Vec::new(), }; diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs index aadc2dbcb..baa8a4d29 100644 --- a/crates/ra_ide/src/references.rs +++ b/crates/ra_ide/src/references.rs @@ -13,25 +13,22 @@ mod classify; mod rename; mod search_scope; -use crate::expand::descend_into_macros_with_analyzer; -use hir::{InFile, SourceBinder}; +use hir::Semantics; use once_cell::unsync::Lazy; -use ra_db::{SourceDatabase, SourceDatabaseExt}; +use ra_db::SourceDatabaseExt; use ra_ide_db::RootDatabase; use ra_prof::profile; use ra_syntax::{ algo::find_node_at_offset, ast::{self, NameOwner}, - match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, + match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, }; +use test_utils::tested_by; use crate::{display::TryToNav, 
FilePosition, FileRange, NavigationTarget, RangeInfo}; -pub(crate) use self::{ - classify::{classify_name, classify_name_ref}, - rename::rename, -}; -pub(crate) use ra_ide_db::defs::NameDefinition; +pub(crate) use self::{classify::classify_name_ref, rename::rename}; +pub(crate) use ra_ide_db::defs::{classify_name, NameDefinition}; pub use self::search_scope::SearchScope; @@ -114,8 +111,8 @@ pub(crate) fn find_all_refs( position: FilePosition, search_scope: Option, ) -> Option> { - let parse = db.parse(position.file_id); - let syntax = parse.tree().syntax().clone(); + let sema = Semantics::new(db); + let syntax = sema.parse(position.file_id).syntax().clone(); let (opt_name, search_kind) = if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { @@ -124,7 +121,7 @@ pub(crate) fn find_all_refs( (find_node_at_offset::(&syntax, position.offset), ReferenceKind::Other) }; - let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?; + let RangeInfo { range, info: (name, def) } = find_name(&sema, &syntax, position, opt_name)?; let declaration = def.try_to_nav(db)?; let search_scope = { @@ -152,19 +149,18 @@ pub(crate) fn find_all_refs( } fn find_name( - db: &RootDatabase, + sema: &Semantics, syntax: &SyntaxNode, position: FilePosition, opt_name: Option, ) -> Option> { - let mut sb = SourceBinder::new(db); if let Some(name) = opt_name { - let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; + let def = classify_name(sema, &name)?; let range = name.syntax().text_range(); return Some(RangeInfo::new(range, (name.text().to_string(), def))); } let name_ref = find_node_at_offset::(&syntax, position.offset)?; - let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?; + let def = classify_name_ref(sema, &name_ref)?; let range = name_ref.syntax().text_range(); Some(RangeInfo::new(range, (name_ref.text().to_string(), def))) } @@ -182,64 +178,53 @@ fn 
process_definition( for (file_id, search_range) in scope { let text = db.file_text(file_id); + let search_range = + search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text))); - let parse = Lazy::new(|| SourceFile::parse(&text)); - let mut sb = Lazy::new(|| SourceBinder::new(db)); - let mut analyzer = None; + let sema = Semantics::new(db); + let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); for (idx, _) in text.match_indices(pat) { let offset = TextUnit::from_usize(idx); + if !search_range.contains_inclusive(offset) { + tested_by!(search_filters_by_range); + continue; + } - let (name_ref, range) = if let Some(name_ref) = - find_node_at_offset::(parse.tree().syntax(), offset) - { - let range = name_ref.syntax().text_range(); - (InFile::new(file_id.into(), name_ref), range) - } else { - // Handle macro token cases - let t = match parse.tree().syntax().token_at_offset(offset) { - TokenAtOffset::None => continue, - TokenAtOffset::Single(t) => t, - TokenAtOffset::Between(_, t) => t, - }; - let range = t.text_range(); - let analyzer = analyzer.get_or_insert_with(|| { - sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None) - }); - let expanded = descend_into_macros_with_analyzer( - db, - &analyzer, - InFile::new(file_id.into(), t), - ); - if let Some(token) = ast::NameRef::cast(expanded.value.parent()) { - (expanded.with_value(token), range) + let name_ref = + if let Some(name_ref) = find_node_at_offset::(&tree, offset) { + name_ref } else { - continue; - } - }; + // Handle macro token cases + let token = match tree.token_at_offset(offset) { + TokenAtOffset::None => continue, + TokenAtOffset::Single(t) => t, + TokenAtOffset::Between(_, t) => t, + }; + let expanded = sema.descend_into_macros(token); + match ast::NameRef::cast(expanded.parent()) { + Some(name_ref) => name_ref, + _ => continue, + } + }; - if let Some(search_range) = search_range { - if !range.is_subrange(&search_range) { - continue; - } - } // FIXME: reuse sb 
// See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 - if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) { + if let Some(d) = classify_name_ref(&sema, &name_ref) { if d == def { - let kind = if is_record_lit_name_ref(&name_ref.value) - || is_call_expr_name_ref(&name_ref.value) - { - ReferenceKind::StructLiteral - } else { - ReferenceKind::Other - }; - + let kind = + if is_record_lit_name_ref(&name_ref) || is_call_expr_name_ref(&name_ref) { + ReferenceKind::StructLiteral + } else { + ReferenceKind::Other + }; + + let file_range = sema.original_range(name_ref.syntax()); refs.push(Reference { - file_range: FileRange { file_id, range }, + file_range, kind, - access: reference_access(&d, &name_ref.value), + access: reference_access(&d, &name_ref), }); } } @@ -348,6 +333,8 @@ fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { #[cfg(test)] mod tests { + use test_utils::covers; + use crate::{ mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis}, Declaration, Reference, ReferenceSearchResult, SearchScope, @@ -455,6 +442,27 @@ mod tests { ); } + #[test] + fn search_filters_by_range() { + covers!(search_filters_by_range); + let code = r#" + fn foo() { + let spam<|> = 92; + spam + spam + } + fn bar() { + let spam = 92; + spam + spam + } + "#; + let refs = get_all_refs(code); + check_result( + refs, + "spam BIND_PAT FileId(1) [44; 48) Other Write", + &["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"], + ); + } + #[test] fn test_find_all_refs_for_param_inside() { let code = r#" diff --git a/crates/ra_ide/src/references/classify.rs b/crates/ra_ide/src/references/classify.rs index 478e18871..91b21429a 100644 --- a/crates/ra_ide/src/references/classify.rs +++ b/crates/ra_ide/src/references/classify.rs @@ -1,34 +1,32 @@ //! Functions that are used to classify an element from its definition or reference. 
-use hir::{InFile, PathResolution, SourceBinder}; +use hir::{PathResolution, Semantics}; +use ra_ide_db::defs::NameDefinition; +use ra_ide_db::RootDatabase; use ra_prof::profile; use ra_syntax::{ast, AstNode}; use test_utils::tested_by; -use super::NameDefinition; -use ra_ide_db::RootDatabase; - -pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field}; +pub use ra_ide_db::defs::{from_module_def, from_struct_field}; pub(crate) fn classify_name_ref( - sb: &mut SourceBinder, - name_ref: InFile<&ast::NameRef>, + sema: &Semantics, + name_ref: &ast::NameRef, ) -> Option { let _p = profile("classify_name_ref"); - let parent = name_ref.value.syntax().parent()?; - let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None); + let parent = name_ref.syntax().parent()?; if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { tested_by!(goto_def_for_methods); - if let Some(func) = analyzer.resolve_method_call(&method_call) { + if let Some(func) = sema.resolve_method_call(&method_call) { return Some(from_module_def(func.into())); } } if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { tested_by!(goto_def_for_fields); - if let Some(field) = analyzer.resolve_field(&field_expr) { + if let Some(field) = sema.resolve_field(&field_expr) { return Some(from_struct_field(field)); } } @@ -36,22 +34,20 @@ pub(crate) fn classify_name_ref( if let Some(record_field) = ast::RecordField::cast(parent.clone()) { tested_by!(goto_def_for_record_fields); tested_by!(goto_def_for_field_init_shorthand); - if let Some(field_def) = analyzer.resolve_record_field(&record_field) { + if let Some(field_def) = sema.resolve_record_field(&record_field) { return Some(from_struct_field(field_def)); } } if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { tested_by!(goto_def_for_macros); - if let Some(macro_def) = - analyzer.resolve_macro_call(sb.db, name_ref.with_value(¯o_call)) - { + if let Some(macro_def) = 
sema.resolve_macro_call(¯o_call) { return Some(NameDefinition::Macro(macro_def)); } } - let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?; - let resolved = analyzer.resolve_path(sb.db, &path)?; + let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; + let resolved = sema.resolve_path(&path)?; let res = match resolved { PathResolution::Def(def) => from_module_def(def), PathResolution::AssocItem(item) => { diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs index bdb90020b..5b4bcf434 100644 --- a/crates/ra_ide/src/references/rename.rs +++ b/crates/ra_ide/src/references/rename.rs @@ -1,7 +1,7 @@ //! FIXME: write short doc here -use hir::ModuleSource; -use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; +use hir::{ModuleSource, Semantics}; +use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt}; use ra_ide_db::RootDatabase; use ra_syntax::{ algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, @@ -24,15 +24,16 @@ pub(crate) fn rename( _ => return None, } - let parse = db.parse(position.file_id); + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); if let Some((ast_name, ast_module)) = - find_name_and_module_at_offset(parse.tree().syntax(), position) + find_name_and_module_at_offset(source_file.syntax(), position) { let range = ast_name.syntax().text_range(); - rename_mod(db, &ast_name, &ast_module, position, new_name) + rename_mod(&sema, &ast_name, &ast_module, position, new_name) .map(|info| RangeInfo::new(range, info)) } else { - rename_reference(db, position, new_name) + rename_reference(sema.db, position, new_name) } } @@ -54,7 +55,7 @@ fn source_edit_from_file_id_range( } fn rename_mod( - db: &RootDatabase, + sema: &Semantics, ast_name: &ast::Name, ast_module: &ast::Module, position: FilePosition, @@ -62,13 +63,12 @@ fn rename_mod( ) -> Option { let mut source_file_edits = 
Vec::new(); let mut file_system_edits = Vec::new(); - let module_src = hir::InFile { file_id: position.file_id.into(), value: ast_module.clone() }; - if let Some(module) = hir::SourceBinder::new(db).to_def(module_src) { - let src = module.definition_source(db); - let file_id = src.file_id.original_file(db); + if let Some(module) = sema.to_def(ast_module) { + let src = module.definition_source(sema.db); + let file_id = src.file_id.original_file(sema.db); match src.value { ModuleSource::SourceFile(..) => { - let mod_path: RelativePathBuf = db.file_relative_path(file_id); + let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id); // mod is defined in path/to/dir/mod.rs let dst_path = if mod_path.file_stem() == Some("mod") { mod_path @@ -82,7 +82,7 @@ fn rename_mod( if let Some(path) = dst_path { let move_file = FileSystemEdit::MoveFile { src: file_id, - dst_source_root: db.file_source_root(position.file_id), + dst_source_root: sema.db.file_source_root(position.file_id), dst_path: path, }; file_system_edits.push(move_file); @@ -98,7 +98,7 @@ fn rename_mod( }; source_file_edits.push(edit); - if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(db, position, None) { + if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) { let ref_edits = refs.references.into_iter().map(|reference| { source_edit_from_file_id_range( reference.file_range.file_id, diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs index be2a67d0a..74877e90f 100644 --- a/crates/ra_ide/src/runnables.rs +++ b/crates/ra_ide/src/runnables.rs @@ -1,8 +1,7 @@ //! 
FIXME: write short doc here -use hir::{InFile, SourceBinder}; +use hir::Semantics; use itertools::Itertools; -use ra_db::SourceDatabase; use ra_ide_db::RootDatabase; use ra_syntax::{ ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, @@ -42,46 +41,33 @@ pub enum RunnableKind { } pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { - let parse = db.parse(file_id); - let mut sb = SourceBinder::new(db); - parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect() + let sema = Semantics::new(db); + let source_file = sema.parse(file_id); + source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect() } -fn runnable( - db: &RootDatabase, - source_binder: &mut SourceBinder, - file_id: FileId, - item: SyntaxNode, -) -> Option { +fn runnable(sema: &Semantics, item: SyntaxNode) -> Option { match_ast! { match item { - ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) }, - ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) }, - _ => { None }, + ast::FnDef(it) => { runnable_fn(sema, it) }, + ast::Module(it) => { runnable_mod(sema, it) }, + _ => None, } } } -fn runnable_fn( - db: &RootDatabase, - source_binder: &mut SourceBinder, - file_id: FileId, - fn_def: ast::FnDef, -) -> Option { +fn runnable_fn(sema: &Semantics, fn_def: ast::FnDef) -> Option { let name_string = fn_def.name()?.text().to_string(); let kind = if name_string == "main" { RunnableKind::Bin } else { - let test_id = if let Some(module) = source_binder - .to_def(InFile::new(file_id.into(), fn_def.clone())) - .map(|def| def.module(db)) - { + let test_id = if let Some(module) = sema.to_def(&fn_def).map(|def| def.module(sema.db)) { let path = module - .path_to_root(db) + .path_to_root(sema.db) .into_iter() .rev() - .filter_map(|it| it.name(db)) + .filter_map(|it| it.name(sema.db)) .map(|name| name.to_string()) .chain(std::iter::once(name_string)) .join("::"); @@ -115,12 +101,7 @@ fn 
has_test_related_attribute(fn_def: &ast::FnDef) -> bool { .any(|attribute_text| attribute_text.contains("test")) } -fn runnable_mod( - db: &RootDatabase, - source_binder: &mut SourceBinder, - file_id: FileId, - module: ast::Module, -) -> Option { +fn runnable_mod(sema: &Semantics, module: ast::Module) -> Option { let has_test_function = module .item_list()? .items() @@ -133,9 +114,10 @@ fn runnable_mod( return None; } let range = module.syntax().text_range(); - let module = source_binder.to_def(InFile::new(file_id.into(), module))?; + let module = sema.to_def(&module)?; - let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); + let path = + module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::"); Some(Runnable { range, kind: RunnableKind::TestMod { path } }) } diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html index 95f038f00..d6a7da953 100644 --- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html @@ -25,14 +25,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd .keyword\.control { color: #F0DFAF; font-weight: bold; }
fn main() {
-    let hello = "hello";
-    let x = hello.to_string();
-    let y = hello.to_string();
+    let hello = "hello";
+    let x = hello.to_string();
+    let y = hello.to_string();
 
-    let x = "other color please!";
-    let y = x.to_string();
+    let x = "other color please!";
+    let y = x.to_string();
 }
 
 fn bar() {
-    let mut hello = "hello";
+    let mut hello = "hello";
 }
\ No newline at end of file diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index 9bc3ad448..987476d2c 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs @@ -1,8 +1,11 @@ //! FIXME: write short doc here -use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder}; +use hir::{Name, Semantics}; use ra_db::SourceDatabase; -use ra_ide_db::{defs::NameDefinition, RootDatabase}; +use ra_ide_db::{ + defs::{classify_name, NameDefinition}, + RootDatabase, +}; use ra_prof::profile; use ra_syntax::{ ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, @@ -10,11 +13,7 @@ use ra_syntax::{ }; use rustc_hash::FxHashMap; -use crate::{ - expand::descend_into_macros_with_analyzer, - references::{classify_name, classify_name_ref}, - FileId, -}; +use crate::{references::classify_name_ref, FileId}; pub mod tags { pub const FIELD: &str = "field"; @@ -73,14 +72,11 @@ pub(crate) fn highlight( range: Option, ) -> Vec { let _p = profile("highlight"); + let sema = Semantics::new(db); + let root = sema.parse(file_id).syntax().clone(); - let parse = db.parse(file_id); - let root = parse.tree().syntax().clone(); - - let mut sb = SourceBinder::new(db); let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); let mut res = Vec::new(); - let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None); let mut in_macro_call = None; @@ -105,7 +101,7 @@ pub(crate) fn highlight( match node.kind() { MACRO_CALL => { in_macro_call = Some(node.clone()); - if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) { + if let Some(range) = highlight_macro(node) { res.push(HighlightedRange { range, tag: tags::MACRO, @@ -116,10 +112,9 @@ pub(crate) fn highlight( _ if in_macro_call.is_some() => { if let Some(token) = node.as_token() { if let Some((tag, binding_hash)) = highlight_token_tree( - &mut sb, - &analyzer, + &sema, &mut 
bindings_shadow_count, - InFile::new(file_id.into(), token.clone()), + token.clone(), ) { res.push(HighlightedRange { range: node.text_range(), @@ -130,11 +125,9 @@ pub(crate) fn highlight( } } _ => { - if let Some((tag, binding_hash)) = highlight_node( - &mut sb, - &mut bindings_shadow_count, - InFile::new(file_id.into(), node.clone()), - ) { + if let Some((tag, binding_hash)) = + highlight_node(&sema, &mut bindings_shadow_count, node.clone()) + { res.push(HighlightedRange { range: node.text_range(), tag, @@ -161,8 +154,8 @@ pub(crate) fn highlight( res } -fn highlight_macro(node: InFile) -> Option { - let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?; +fn highlight_macro(node: SyntaxElement) -> Option { + let macro_call = ast::MacroCall::cast(node.as_node()?.clone())?; let path = macro_call.path()?; let name_ref = path.segment()?.name_ref()?; @@ -179,35 +172,34 @@ fn highlight_macro(node: InFile) -> Option { } fn highlight_token_tree( - sb: &mut SourceBinder, - analyzer: &SourceAnalyzer, + sema: &Semantics, bindings_shadow_count: &mut FxHashMap, - token: InFile, + token: SyntaxToken, ) -> Option<(&'static str, Option)> { - if token.value.parent().kind() != TOKEN_TREE { + if token.parent().kind() != TOKEN_TREE { return None; } - let token = descend_into_macros_with_analyzer(sb.db, analyzer, token); + let token = sema.descend_into_macros(token.clone()); let expanded = { - let parent = token.value.parent(); + let parent = token.parent(); // We only care Name and Name_ref - match (token.value.kind(), parent.kind()) { - (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()), - _ => token.map(|it| it.into()), + match (token.kind(), parent.kind()) { + (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(), + _ => token.into(), } }; - highlight_node(sb, bindings_shadow_count, expanded) + highlight_node(sema, bindings_shadow_count, expanded) } fn highlight_node( - sb: &mut SourceBinder, + sema: &Semantics, bindings_shadow_count: &mut 
FxHashMap, - node: InFile, + node: SyntaxElement, ) -> Option<(&'static str, Option)> { - let db = sb.db; + let db = sema.db; let mut binding_hash = None; - let tag = match node.value.kind() { + let tag = match node.kind() { FN_DEF => { bindings_shadow_count.clear(); return None; @@ -216,19 +208,18 @@ fn highlight_node( STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, ATTR => tags::LITERAL_ATTRIBUTE, // Special-case field init shorthand - NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, - NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None, + NAME_REF if node.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, + NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => return None, NAME_REF => { - let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); - let name_kind = classify_name_ref(sb, node.with_value(&name_ref)); + let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); + let name_kind = classify_name_ref(sema, &name_ref); match name_kind { Some(name_kind) => { if let NameDefinition::Local(local) = &name_kind { if let Some(name) = local.name(db) { let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - binding_hash = - Some(calc_binding_hash(node.file_id, &name, *shadow_count)) + binding_hash = Some(calc_binding_hash(&name, *shadow_count)) } }; @@ -238,14 +229,14 @@ fn highlight_node( } } NAME => { - let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap(); - let name_kind = classify_name(sb, node.with_value(&name)); + let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap(); + let name_kind = classify_name(sema, &name); if let Some(NameDefinition::Local(local)) = &name_kind { if let Some(name) = local.name(db) { let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); *shadow_count += 1; - binding_hash = 
Some(calc_binding_hash(node.file_id, &name, *shadow_count)) + binding_hash = Some(calc_binding_hash(&name, *shadow_count)) } }; @@ -272,7 +263,7 @@ fn highlight_node( return Some((tag, binding_hash)); - fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 { + fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 { fn hash(x: T) -> u64 { use std::{collections::hash_map::DefaultHasher, hash::Hasher}; @@ -281,7 +272,7 @@ fn highlight_node( hasher.finish() } - hash((file_id, name, shadow_count)) + hash((name, shadow_count)) } } -- cgit v1.2.3