From 1b0c7701cc97cd7bef8bb9729011d4cf291a60c5 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Thu, 13 Aug 2020 17:42:52 +0200 Subject: Rename ra_ide -> ide --- crates/ide/Cargo.toml | 35 + crates/ide/src/call_hierarchy.rs | 393 ++++ crates/ide/src/call_info.rs | 742 ++++++ crates/ide/src/completion.rs | 206 ++ crates/ide/src/completion/complete_attribute.rs | 644 +++++ crates/ide/src/completion/complete_dot.rs | 416 ++++ crates/ide/src/completion/complete_fn_param.rs | 135 ++ crates/ide/src/completion/complete_keyword.rs | 536 +++++ .../completion/complete_macro_in_item_position.rs | 41 + crates/ide/src/completion/complete_pattern.rs | 88 + crates/ide/src/completion/complete_postfix.rs | 378 +++ .../ide/src/completion/complete_qualified_path.rs | 733 ++++++ crates/ide/src/completion/complete_record.rs | 226 ++ crates/ide/src/completion/complete_snippet.rs | 116 + crates/ide/src/completion/complete_trait_impl.rs | 488 ++++ .../src/completion/complete_unqualified_path.rs | 658 ++++++ crates/ide/src/completion/completion_config.rs | 35 + crates/ide/src/completion/completion_context.rs | 465 ++++ crates/ide/src/completion/completion_item.rs | 384 +++ crates/ide/src/completion/patterns.rs | 194 ++ crates/ide/src/completion/presentation.rs | 1229 ++++++++++ crates/ide/src/completion/test_utils.rs | 114 + crates/ide/src/diagnostics.rs | 678 ++++++ crates/ide/src/diagnostics/diagnostics_with_fix.rs | 171 ++ crates/ide/src/display.rs | 83 + crates/ide/src/display/navigation_target.rs | 491 ++++ crates/ide/src/display/short_label.rs | 111 + crates/ide/src/expand_macro.rs | 283 +++ crates/ide/src/extend_selection.rs | 654 ++++++ crates/ide/src/file_structure.rs | 431 ++++ crates/ide/src/folding_ranges.rs | 422 ++++ crates/ide/src/goto_definition.rs | 989 ++++++++ crates/ide/src/goto_implementation.rs | 229 ++ crates/ide/src/goto_type_definition.rs | 151 ++ crates/ide/src/hover.rs | 2461 ++++++++++++++++++++ crates/ide/src/inlay_hints.rs | 927 ++++++++ 
crates/ide/src/join_lines.rs | 773 ++++++ crates/ide/src/lib.rs | 542 +++++ crates/ide/src/markup.rs | 38 + crates/ide/src/matching_brace.rs | 73 + crates/ide/src/mock_analysis.rs | 176 ++ crates/ide/src/parent_module.rs | 155 ++ crates/ide/src/prime_caches.rs | 12 + crates/ide/src/references.rs | 694 ++++++ crates/ide/src/references/rename.rs | 1010 ++++++++ crates/ide/src/runnables.rs | 883 +++++++ crates/ide/src/status.rs | 145 ++ crates/ide/src/syntax_highlighting.rs | 872 +++++++ crates/ide/src/syntax_highlighting/html.rs | 97 + crates/ide/src/syntax_highlighting/injection.rs | 187 ++ crates/ide/src/syntax_highlighting/tags.rs | 203 ++ crates/ide/src/syntax_highlighting/tests.rs | 445 ++++ crates/ide/src/syntax_tree.rs | 359 +++ crates/ide/src/typing.rs | 364 +++ crates/ide/src/typing/on_enter.rs | 256 ++ crates/ide/test_data/highlight_doctest.html | 102 + crates/ide/test_data/highlight_extern_crate.html | 40 + crates/ide/test_data/highlight_injection.html | 48 + crates/ide/test_data/highlight_strings.html | 96 + crates/ide/test_data/highlight_unsafe.html | 99 + crates/ide/test_data/highlighting.html | 131 ++ crates/ide/test_data/rainbow_highlighting.html | 49 + 62 files changed, 24186 insertions(+) create mode 100644 crates/ide/Cargo.toml create mode 100644 crates/ide/src/call_hierarchy.rs create mode 100644 crates/ide/src/call_info.rs create mode 100644 crates/ide/src/completion.rs create mode 100644 crates/ide/src/completion/complete_attribute.rs create mode 100644 crates/ide/src/completion/complete_dot.rs create mode 100644 crates/ide/src/completion/complete_fn_param.rs create mode 100644 crates/ide/src/completion/complete_keyword.rs create mode 100644 crates/ide/src/completion/complete_macro_in_item_position.rs create mode 100644 crates/ide/src/completion/complete_pattern.rs create mode 100644 crates/ide/src/completion/complete_postfix.rs create mode 100644 crates/ide/src/completion/complete_qualified_path.rs create mode 100644 
crates/ide/src/completion/complete_record.rs create mode 100644 crates/ide/src/completion/complete_snippet.rs create mode 100644 crates/ide/src/completion/complete_trait_impl.rs create mode 100644 crates/ide/src/completion/complete_unqualified_path.rs create mode 100644 crates/ide/src/completion/completion_config.rs create mode 100644 crates/ide/src/completion/completion_context.rs create mode 100644 crates/ide/src/completion/completion_item.rs create mode 100644 crates/ide/src/completion/patterns.rs create mode 100644 crates/ide/src/completion/presentation.rs create mode 100644 crates/ide/src/completion/test_utils.rs create mode 100644 crates/ide/src/diagnostics.rs create mode 100644 crates/ide/src/diagnostics/diagnostics_with_fix.rs create mode 100644 crates/ide/src/display.rs create mode 100644 crates/ide/src/display/navigation_target.rs create mode 100644 crates/ide/src/display/short_label.rs create mode 100644 crates/ide/src/expand_macro.rs create mode 100644 crates/ide/src/extend_selection.rs create mode 100644 crates/ide/src/file_structure.rs create mode 100644 crates/ide/src/folding_ranges.rs create mode 100644 crates/ide/src/goto_definition.rs create mode 100644 crates/ide/src/goto_implementation.rs create mode 100644 crates/ide/src/goto_type_definition.rs create mode 100644 crates/ide/src/hover.rs create mode 100644 crates/ide/src/inlay_hints.rs create mode 100644 crates/ide/src/join_lines.rs create mode 100644 crates/ide/src/lib.rs create mode 100644 crates/ide/src/markup.rs create mode 100644 crates/ide/src/matching_brace.rs create mode 100644 crates/ide/src/mock_analysis.rs create mode 100644 crates/ide/src/parent_module.rs create mode 100644 crates/ide/src/prime_caches.rs create mode 100644 crates/ide/src/references.rs create mode 100644 crates/ide/src/references/rename.rs create mode 100644 crates/ide/src/runnables.rs create mode 100644 crates/ide/src/status.rs create mode 100644 crates/ide/src/syntax_highlighting.rs create mode 100644 
crates/ide/src/syntax_highlighting/html.rs create mode 100644 crates/ide/src/syntax_highlighting/injection.rs create mode 100644 crates/ide/src/syntax_highlighting/tags.rs create mode 100644 crates/ide/src/syntax_highlighting/tests.rs create mode 100644 crates/ide/src/syntax_tree.rs create mode 100644 crates/ide/src/typing.rs create mode 100644 crates/ide/src/typing/on_enter.rs create mode 100644 crates/ide/test_data/highlight_doctest.html create mode 100644 crates/ide/test_data/highlight_extern_crate.html create mode 100644 crates/ide/test_data/highlight_injection.html create mode 100644 crates/ide/test_data/highlight_strings.html create mode 100644 crates/ide/test_data/highlight_unsafe.html create mode 100644 crates/ide/test_data/highlighting.html create mode 100644 crates/ide/test_data/rainbow_highlighting.html (limited to 'crates/ide') diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml new file mode 100644 index 000000000..e4b970c73 --- /dev/null +++ b/crates/ide/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "ide" +version = "0.0.0" +license = "MIT OR Apache-2.0" +authors = ["rust-analyzer developers"] +edition = "2018" + +[lib] +doctest = false + +[dependencies] +either = "1.5.3" +indexmap = "1.3.2" +itertools = "0.9.0" +log = "0.4.8" +rustc-hash = "1.1.0" +oorandom = "11.1.2" + +stdx = { path = "../stdx" } +syntax = { path = "../syntax" } +text_edit = { path = "../text_edit" } +base_db = { path = "../base_db" } +ide_db = { path = "../ide_db" } +cfg = { path = "../cfg" } +profile = { path = "../profile" } +test_utils = { path = "../test_utils" } +assists = { path = "../assists" } +ssr = { path = "../ssr" } + +# ide should depend only on the top-level `hir` package. if you need +# something from some `hir_xxx` subpackage, reexport the API via `hir`. 
+hir = { path = "../hir" } + +[dev-dependencies] +expect = { path = "../expect" } diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs new file mode 100644 index 000000000..58e26b94c --- /dev/null +++ b/crates/ide/src/call_hierarchy.rs @@ -0,0 +1,393 @@ +//! Entry point for call-hierarchy + +use indexmap::IndexMap; + +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ast, match_ast, AstNode, TextRange}; + +use crate::{ + call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition, + NavigationTarget, RangeInfo, +}; + +#[derive(Debug, Clone)] +pub struct CallItem { + pub target: NavigationTarget, + pub ranges: Vec, +} + +impl CallItem { + #[cfg(test)] + pub(crate) fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + + #[cfg(test)] + pub(crate) fn debug_render(&self) -> String { + format!("{} : {:?}", self.target.debug_render(), self.ranges) + } +} + +pub(crate) fn call_hierarchy( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + goto_definition::goto_definition(db, position) +} + +pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); + + // 1. Find all refs + // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. + // 3. Add ranges relative to the start of the fndef. 
+ let refs = references::find_all_refs(&sema, position, None)?; + + let mut calls = CallLocations::default(); + + for reference in refs.info.references() { + let file_id = reference.file_range.file_id; + let file = sema.parse(file_id); + let file = file.syntax(); + let token = file.token_at_offset(reference.file_range.range.start()).next()?; + let token = sema.descend_into_macros(token); + let syntax = token.parent(); + + // This target is the containing function + if let Some(nav) = syntax.ancestors().find_map(|node| { + match_ast! { + match node { + ast::Fn(it) => { + let def = sema.to_def(&it)?; + Some(def.to_nav(sema.db)) + }, + _ => None, + } + } + }) { + let relative_range = reference.file_range.range; + calls.add(&nav, relative_range); + } + } + + Some(calls.into_items()) +} + +pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); + let file_id = position.file_id; + let file = sema.parse(file_id); + let file = file.syntax(); + let token = file.token_at_offset(position.offset).next()?; + let token = sema.descend_into_macros(token); + let syntax = token.parent(); + + let mut calls = CallLocations::default(); + + syntax + .descendants() + .filter_map(|node| FnCallNode::with_node_exact(&node)) + .filter_map(|call_node| { + let name_ref = call_node.name_ref()?; + + if let Some(func_target) = match &call_node { + FnCallNode::CallExpr(expr) => { + //FIXME: Type::as_callable is broken + let callable = sema.type_of_expr(&expr.expr()?)?.as_callable(db)?; + match callable.kind() { + hir::CallableKind::Function(it) => { + let fn_def: hir::Function = it.into(); + let nav = fn_def.to_nav(db); + Some(nav) + } + _ => None, + } + } + FnCallNode::MethodCallExpr(expr) => { + let function = sema.resolve_method_call(&expr)?; + Some(function.to_nav(db)) + } + } { + Some((func_target, name_ref.syntax().text_range())) + } else { + None + } + }) + .for_each(|(nav, range)| calls.add(&nav, range)); + + 
Some(calls.into_items()) +} + +#[derive(Default)] +struct CallLocations { + funcs: IndexMap>, +} + +impl CallLocations { + fn add(&mut self, target: &NavigationTarget, range: TextRange) { + self.funcs.entry(target.clone()).or_default().push(range); + } + + fn into_items(self) -> Vec { + self.funcs.into_iter().map(|(target, ranges)| CallItem { target, ranges }).collect() + } +} + +#[cfg(test)] +mod tests { + use base_db::FilePosition; + + use crate::mock_analysis::analysis_and_position; + + fn check_hierarchy( + ra_fixture: &str, + expected: &str, + expected_incoming: &[&str], + expected_outgoing: &[&str], + ) { + let (analysis, pos) = analysis_and_position(ra_fixture); + + let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; + assert_eq!(navs.len(), 1); + let nav = navs.pop().unwrap(); + nav.assert_match(expected); + + let item_pos = + FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; + let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap(); + assert_eq!(incoming_calls.len(), expected_incoming.len()); + + for call in 0..incoming_calls.len() { + incoming_calls[call].assert_match(expected_incoming[call]); + } + + let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap(); + assert_eq!(outgoing_calls.len(), expected_outgoing.len()); + + for call in 0..outgoing_calls.len() { + outgoing_calls[call].assert_match(expected_outgoing[call]); + } + } + + #[test] + fn test_call_hierarchy_on_ref() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn caller() { + call<|>ee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &["caller FN FileId(1) 15..44 18..24 : [33..39]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_on_def() { + check_hierarchy( + r#" +//- /lib.rs +fn call<|>ee() {} +fn caller() { + callee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &["caller FN FileId(1) 15..44 18..24 : [33..39]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_same_fn() { + 
check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn caller() { + call<|>ee(); + callee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &["caller FN FileId(1) 15..58 18..24 : [33..39, 47..53]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_different_fn() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn caller1() { + call<|>ee(); +} + +fn caller2() { + callee(); +} +"#, + "callee FN FileId(1) 0..14 3..9", + &[ + "caller1 FN FileId(1) 15..45 18..25 : [34..40]", + "caller2 FN FileId(1) 47..77 50..57 : [66..72]", + ], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_tests_mod() { + check_hierarchy( + r#" +//- /lib.rs cfg:test +fn callee() {} +fn caller1() { + call<|>ee(); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_caller() { + callee(); + } +} +"#, + "callee FN FileId(1) 0..14 3..9", + &[ + "caller1 FN FileId(1) 15..45 18..25 : [34..40]", + "test_caller FN FileId(1) 95..149 110..121 : [134..140]", + ], + &[], + ); + } + + #[test] + fn test_call_hierarchy_in_different_files() { + check_hierarchy( + r#" +//- /lib.rs +mod foo; +use foo::callee; + +fn caller() { + call<|>ee(); +} + +//- /foo/mod.rs +pub fn callee() {} +"#, + "callee FN FileId(2) 0..18 7..13", + &["caller FN FileId(1) 27..56 30..36 : [45..51]"], + &[], + ); + } + + #[test] + fn test_call_hierarchy_outgoing() { + check_hierarchy( + r#" +//- /lib.rs +fn callee() {} +fn call<|>er() { + callee(); + callee(); +} +"#, + "caller FN FileId(1) 15..58 18..24", + &[], + &["callee FN FileId(1) 0..14 3..9 : [33..39, 47..53]"], + ); + } + + #[test] + fn test_call_hierarchy_outgoing_in_different_files() { + check_hierarchy( + r#" +//- /lib.rs +mod foo; +use foo::callee; + +fn call<|>er() { + callee(); +} + +//- /foo/mod.rs +pub fn callee() {} +"#, + "caller FN FileId(1) 27..56 30..36", + &[], + &["callee FN FileId(2) 0..18 7..13 : [45..51]"], + ); + } + + #[test] + fn test_call_hierarchy_incoming_outgoing() { + check_hierarchy( + r#" +//- /lib.rs +fn caller1() { 
+ call<|>er2(); +} + +fn caller2() { + caller3(); +} + +fn caller3() { + +} +"#, + "caller2 FN FileId(1) 33..64 36..43", + &["caller1 FN FileId(1) 0..31 3..10 : [19..26]"], + &["caller3 FN FileId(1) 66..83 69..76 : [52..59]"], + ); + } + + #[test] + fn test_call_hierarchy_issue_5103() { + check_hierarchy( + r#" +fn a() { + b() +} + +fn b() {} + +fn main() { + a<|>() +} +"#, + "a FN FileId(1) 0..18 3..4", + &["main FN FileId(1) 31..52 34..38 : [47..48]"], + &["b FN FileId(1) 20..29 23..24 : [13..14]"], + ); + + check_hierarchy( + r#" +fn a() { + b<|>() +} + +fn b() {} + +fn main() { + a() +} +"#, + "b FN FileId(1) 20..29 23..24", + &["a FN FileId(1) 0..18 3..4 : [13..14]"], + &[], + ); + } +} diff --git a/crates/ide/src/call_info.rs b/crates/ide/src/call_info.rs new file mode 100644 index 000000000..86abd2d8c --- /dev/null +++ b/crates/ide/src/call_info.rs @@ -0,0 +1,742 @@ +//! FIXME: write short doc here +use either::Either; +use hir::{Docs, HirDisplay, Semantics, Type}; +use ide_db::RootDatabase; +use stdx::format_to; +use syntax::{ + ast::{self, ArgListOwner}, + match_ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize, +}; +use test_utils::mark; + +use crate::FilePosition; + +/// Contains information about a call site. Specifically the +/// `FunctionSignature`and current parameter. 
+#[derive(Debug)] +pub struct CallInfo { + pub doc: Option, + pub signature: String, + pub active_parameter: Option, + parameters: Vec, +} + +impl CallInfo { + pub fn parameter_labels(&self) -> impl Iterator + '_ { + self.parameters.iter().map(move |&it| &self.signature[it]) + } + pub fn parameter_ranges(&self) -> &[TextRange] { + &self.parameters + } + fn push_param(&mut self, param: &str) { + if !self.signature.ends_with('(') { + self.signature.push_str(", "); + } + let start = TextSize::of(&self.signature); + self.signature.push_str(param); + let end = TextSize::of(&self.signature); + self.parameters.push(TextRange::new(start, end)) + } +} + +/// Computes parameter information for the given call expression. +pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let file = file.syntax(); + let token = file.token_at_offset(position.offset).next()?; + let token = sema.descend_into_macros(token); + + let (callable, active_parameter) = call_info_impl(&sema, token)?; + + let mut res = + CallInfo { doc: None, signature: String::new(), parameters: vec![], active_parameter }; + + match callable.kind() { + hir::CallableKind::Function(func) => { + res.doc = func.docs(db).map(|it| it.as_str().to_string()); + format_to!(res.signature, "fn {}", func.name(db)); + } + hir::CallableKind::TupleStruct(strukt) => { + res.doc = strukt.docs(db).map(|it| it.as_str().to_string()); + format_to!(res.signature, "struct {}", strukt.name(db)); + } + hir::CallableKind::TupleEnumVariant(variant) => { + res.doc = variant.docs(db).map(|it| it.as_str().to_string()); + format_to!( + res.signature, + "enum {}::{}", + variant.parent_enum(db).name(db), + variant.name(db) + ); + } + hir::CallableKind::Closure => (), + } + + res.signature.push('('); + { + if let Some(self_param) = callable.receiver_param(db) { + format_to!(res.signature, "{}", self_param) + } + let mut buf = String::new(); + for 
(pat, ty) in callable.params(db) { + buf.clear(); + if let Some(pat) = pat { + match pat { + Either::Left(_self) => format_to!(buf, "self: "), + Either::Right(pat) => format_to!(buf, "{}: ", pat), + } + } + format_to!(buf, "{}", ty.display(db)); + res.push_param(&buf); + } + } + res.signature.push(')'); + + match callable.kind() { + hir::CallableKind::Function(_) | hir::CallableKind::Closure => { + let ret_type = callable.return_type(); + if !ret_type.is_unit() { + format_to!(res.signature, " -> {}", ret_type.display(db)); + } + } + hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {} + } + Some(res) +} + +fn call_info_impl( + sema: &Semantics, + token: SyntaxToken, +) -> Option<(hir::Callable, Option)> { + // Find the calling expression and it's NameRef + let calling_node = FnCallNode::with_node(&token.parent())?; + + let callable = match &calling_node { + FnCallNode::CallExpr(call) => sema.type_of_expr(&call.expr()?)?.as_callable(sema.db)?, + FnCallNode::MethodCallExpr(call) => sema.resolve_method_call_as_callable(call)?, + }; + let active_param = if let Some(arg_list) = calling_node.arg_list() { + // Number of arguments specified at the call site + let num_args_at_callsite = arg_list.args().count(); + + let arg_list_range = arg_list.syntax().text_range(); + if !arg_list_range.contains_inclusive(token.text_range().start()) { + mark::hit!(call_info_bad_offset); + return None; + } + let param = std::cmp::min( + num_args_at_callsite, + arg_list + .args() + .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) + .count(), + ); + + Some(param) + } else { + None + }; + Some((callable, active_param)) +} + +#[derive(Debug)] +pub(crate) struct ActiveParameter { + pub(crate) ty: Type, + pub(crate) name: String, +} + +impl ActiveParameter { + pub(crate) fn at(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let file = file.syntax(); + 
let token = file.token_at_offset(position.offset).next()?; + let token = sema.descend_into_macros(token); + Self::at_token(&sema, token) + } + + pub(crate) fn at_token(sema: &Semantics, token: SyntaxToken) -> Option { + let (signature, active_parameter) = call_info_impl(&sema, token)?; + + let idx = active_parameter?; + let mut params = signature.params(sema.db); + if !(idx < params.len()) { + mark::hit!(too_many_arguments); + return None; + } + let (pat, ty) = params.swap_remove(idx); + let name = pat?.to_string(); + Some(ActiveParameter { ty, name }) + } +} + +#[derive(Debug)] +pub(crate) enum FnCallNode { + CallExpr(ast::CallExpr), + MethodCallExpr(ast::MethodCallExpr), +} + +impl FnCallNode { + fn with_node(syntax: &SyntaxNode) -> Option { + syntax.ancestors().find_map(|node| { + match_ast! { + match node { + ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), + ast::MethodCallExpr(it) => { + let arg_list = it.arg_list()?; + if !arg_list.syntax().text_range().contains_range(syntax.text_range()) { + return None; + } + Some(FnCallNode::MethodCallExpr(it)) + }, + _ => None, + } + } + }) + } + + pub(crate) fn with_node_exact(node: &SyntaxNode) -> Option { + match_ast! { + match node { + ast::CallExpr(it) => Some(FnCallNode::CallExpr(it)), + ast::MethodCallExpr(it) => Some(FnCallNode::MethodCallExpr(it)), + _ => None, + } + } + } + + pub(crate) fn name_ref(&self) -> Option { + match self { + FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? 
{ + ast::Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, + _ => return None, + }), + + FnCallNode::MethodCallExpr(call_expr) => { + call_expr.syntax().children().filter_map(ast::NameRef::cast).next() + } + } + } + + fn arg_list(&self) -> Option { + match self { + FnCallNode::CallExpr(expr) => expr.arg_list(), + FnCallNode::MethodCallExpr(expr) => expr.arg_list(), + } + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::mock_analysis::analysis_and_position; + + fn check(ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let call_info = analysis.call_info(position).unwrap(); + let actual = match call_info { + Some(call_info) => { + let docs = match &call_info.doc { + None => "".to_string(), + Some(docs) => format!("{}\n------\n", docs.as_str()), + }; + let params = call_info + .parameter_labels() + .enumerate() + .map(|(i, param)| { + if Some(i) == call_info.active_parameter { + format!("<{}>", param) + } else { + param.to_string() + } + }) + .collect::>() + .join(", "); + format!("{}{}\n({})\n", docs, call_info.signature, params) + } + None => String::new(), + }; + expect.assert_eq(&actual); + } + + #[test] + fn test_fn_signature_two_args() { + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>3, ); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (, y: u32) + "#]], + ); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3<|>, ); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (, y: u32) + "#]], + ); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3,<|> ); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (x: u32, ) + "#]], + ); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3, <|>); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (x: u32, ) + "#]], + ); + } + + #[test] + fn test_fn_signature_two_args_empty() { + check( + r#" 
+fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>); } +"#, + expect![[r#" + fn foo(x: u32, y: u32) -> u32 + (, y: u32) + "#]], + ); + } + + #[test] + fn test_fn_signature_two_args_first_generics() { + check( + r#" +fn foo(x: T, y: U) -> u32 + where T: Copy + Display, U: Debug +{ x + y } + +fn bar() { foo(<|>3, ); } +"#, + expect![[r#" + fn foo(x: i32, y: {unknown}) -> u32 + (, y: {unknown}) + "#]], + ); + } + + #[test] + fn test_fn_signature_no_params() { + check( + r#" +fn foo() -> T where T: Copy + Display {} +fn bar() { foo(<|>); } +"#, + expect![[r#" + fn foo() -> {unknown} + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_impl() { + check( + r#" +struct F; +impl F { pub fn new() { } } +fn bar() { + let _ : F = F::new(<|>); +} +"#, + expect![[r#" + fn new() + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_method_self() { + check( + r#" +struct S; +impl S { pub fn do_it(&self) {} } + +fn bar() { + let s: S = S; + s.do_it(<|>); +} +"#, + expect![[r#" + fn do_it(&self) + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_method_with_arg() { + check( + r#" +struct S; +impl S { + fn foo(&self, x: i32) {} +} + +fn main() { S.foo(<|>); } +"#, + expect![[r#" + fn foo(&self, x: i32) + () + "#]], + ); + } + + #[test] + fn test_fn_signature_for_method_with_arg_as_assoc_fn() { + check( + r#" +struct S; +impl S { + fn foo(&self, x: i32) {} +} + +fn main() { S::foo(<|>); } +"#, + expect![[r#" + fn foo(self: &S, x: i32) + (, x: i32) + "#]], + ); + } + + #[test] + fn test_fn_signature_with_docs_simple() { + check( + r#" +/// test +// non-doc-comment +fn foo(j: u32) -> u32 { + j +} + +fn bar() { + let _ = foo(<|>); +} +"#, + expect![[r#" + test + ------ + fn foo(j: u32) -> u32 + () + "#]], + ); + } + + #[test] + fn test_fn_signature_with_docs() { + check( + r#" +/// Adds one to the number given. 
+/// +/// # Examples +/// +/// ``` +/// let five = 5; +/// +/// assert_eq!(6, my_crate::add_one(5)); +/// ``` +pub fn add_one(x: i32) -> i32 { + x + 1 +} + +pub fn do() { + add_one(<|> +}"#, + expect![[r##" + Adds one to the number given. + + # Examples + + ``` + let five = 5; + + assert_eq!(6, my_crate::add_one(5)); + ``` + ------ + fn add_one(x: i32) -> i32 + () + "##]], + ); + } + + #[test] + fn test_fn_signature_with_docs_impl() { + check( + r#" +struct addr; +impl addr { + /// Adds one to the number given. + /// + /// # Examples + /// + /// ``` + /// let five = 5; + /// + /// assert_eq!(6, my_crate::add_one(5)); + /// ``` + pub fn add_one(x: i32) -> i32 { + x + 1 + } +} + +pub fn do_it() { + addr {}; + addr::add_one(<|>); +} +"#, + expect![[r##" + Adds one to the number given. + + # Examples + + ``` + let five = 5; + + assert_eq!(6, my_crate::add_one(5)); + ``` + ------ + fn add_one(x: i32) -> i32 + () + "##]], + ); + } + + #[test] + fn test_fn_signature_with_docs_from_actix() { + check( + r#" +struct WriteHandler; + +impl WriteHandler { + /// Method is called when writer emits error. + /// + /// If this method returns `ErrorAction::Continue` writer processing + /// continues otherwise stream processing stops. + fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { + Running::Stop + } + + /// Method is called when writer finishes. + /// + /// By default this method stops actor's `Context`. + fn finished(&mut self, ctx: &mut Self::Context) { + ctx.stop() + } +} + +pub fn foo(mut r: WriteHandler<()>) { + r.finished(<|>); +} +"#, + expect![[r#" + Method is called when writer finishes. + + By default this method stops actor's `Context`. 
+ ------ + fn finished(&mut self, ctx: &mut {unknown}) + () + "#]], + ); + } + + #[test] + fn call_info_bad_offset() { + mark::check!(call_info_bad_offset); + check( + r#" +fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo <|> (3, ); } +"#, + expect![[""]], + ); + } + + #[test] + fn test_nested_method_in_lambda() { + check( + r#" +struct Foo; +impl Foo { fn bar(&self, _: u32) { } } + +fn bar(_: u32) { } + +fn main() { + let foo = Foo; + std::thread::spawn(move || foo.bar(<|>)); +} +"#, + expect![[r#" + fn bar(&self, _: u32) + (<_: u32>) + "#]], + ); + } + + #[test] + fn works_for_tuple_structs() { + check( + r#" +/// A cool tuple struct +struct S(u32, i32); +fn main() { + let s = S(0, <|>); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S(u32, i32) + (u32, ) + "#]], + ); + } + + #[test] + fn generic_struct() { + check( + r#" +struct S(T); +fn main() { + let s = S(<|>); +} +"#, + expect![[r#" + struct S({unknown}) + (<{unknown}>) + "#]], + ); + } + + #[test] + fn works_for_enum_variants() { + check( + r#" +enum E { + /// A Variant + A(i32), + /// Another + B, + /// And C + C { a: i32, b: i32 } +} + +fn main() { + let a = E::A(<|>); +} +"#, + expect![[r#" + A Variant + ------ + enum E::A(i32) + () + "#]], + ); + } + + #[test] + fn cant_call_struct_record() { + check( + r#" +struct S { x: u32, y: i32 } +fn main() { + let s = S(<|>); +} +"#, + expect![[""]], + ); + } + + #[test] + fn cant_call_enum_record() { + check( + r#" +enum E { + /// A Variant + A(i32), + /// Another + B, + /// And C + C { a: i32, b: i32 } +} + +fn main() { + let a = E::C(<|>); +} +"#, + expect![[""]], + ); + } + + #[test] + fn fn_signature_for_call_in_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +fn foo() { } +id! 
{ + fn bar() { foo(<|>); } +} +"#, + expect![[r#" + fn foo() + () + "#]], + ); + } + + #[test] + fn call_info_for_lambdas() { + check( + r#" +struct S; +fn foo(s: S) -> i32 { 92 } +fn main() { + (|s| foo(s))(<|>) +} + "#, + expect![[r#" + (S) -> i32 + () + "#]], + ) + } + + #[test] + fn call_info_for_fn_ptr() { + check( + r#" +fn main(f: fn(i32, f64) -> char) { + f(0, <|>) +} + "#, + expect![[r#" + (i32, f64) -> char + (i32, ) + "#]], + ) + } +} diff --git a/crates/ide/src/completion.rs b/crates/ide/src/completion.rs new file mode 100644 index 000000000..7fb4d687e --- /dev/null +++ b/crates/ide/src/completion.rs @@ -0,0 +1,206 @@ +mod completion_config; +mod completion_item; +mod completion_context; +mod presentation; +mod patterns; +#[cfg(test)] +mod test_utils; + +mod complete_attribute; +mod complete_dot; +mod complete_record; +mod complete_pattern; +mod complete_fn_param; +mod complete_keyword; +mod complete_snippet; +mod complete_qualified_path; +mod complete_unqualified_path; +mod complete_postfix; +mod complete_macro_in_item_position; +mod complete_trait_impl; + +use ide_db::RootDatabase; + +use crate::{ + completion::{ + completion_context::CompletionContext, + completion_item::{CompletionKind, Completions}, + }, + FilePosition, +}; + +pub use crate::completion::{ + completion_config::CompletionConfig, + completion_item::{CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat}, +}; + +//FIXME: split the following feature into fine-grained features. + +// Feature: Magic Completions +// +// In addition to usual reference completion, rust-analyzer provides some ✨magic✨ +// completions as well: +// +// Keywords like `if`, `else` `while`, `loop` are completed with braces, and cursor +// is placed at the appropriate position. Even though `if` is easy to type, you +// still want to complete it, to get ` { }` for free! `return` is inserted with a +// space or `;` depending on the return type of the function. 
+// +// When completing a function call, `()` are automatically inserted. If a function +// takes arguments, the cursor is positioned inside the parenthesis. +// +// There are postfix completions, which can be triggered by typing something like +// `foo().if`. The word after `.` determines postfix completion. Possible variants are: +// +// - `expr.if` -> `if expr {}` or `if let ... {}` for `Option` or `Result` +// - `expr.match` -> `match expr {}` +// - `expr.while` -> `while expr {}` or `while let ... {}` for `Option` or `Result` +// - `expr.ref` -> `&expr` +// - `expr.refm` -> `&mut expr` +// - `expr.not` -> `!expr` +// - `expr.dbg` -> `dbg!(expr)` +// +// There also snippet completions: +// +// .Expressions +// - `pd` -> `eprintln!(" = {:?}", );` +// - `ppd` -> `eprintln!(" = {:#?}", );` +// +// .Items +// - `tfn` -> `#[test] fn feature(){}` +// - `tmod` -> +// ```rust +// #[cfg(test)] +// mod tests { +// use super::*; +// +// #[test] +// fn test_name() {} +// } +// ``` + +/// Main entry point for completion. We run completion as a two-phase process. +/// +/// First, we look at the position and collect a so-called `CompletionContext. +/// This is a somewhat messy process, because, during completion, syntax tree is +/// incomplete and can look really weird. +/// +/// Once the context is collected, we run a series of completion routines which +/// look at the context and produce completion items. One subtlety about this +/// phase is that completion engine should not filter by the substring which is +/// already present, it should give all possible variants for the identifier at +/// the caret. In other words, for +/// +/// ```no-run +/// fn f() { +/// let foo = 92; +/// let _ = bar<|> +/// } +/// ``` +/// +/// `foo` *should* be present among the completion variants. Filtering by +/// identifier prefix/fuzzy match should be done higher in the stack, together +/// with ordering of completions (currently this is done by the client). 
+pub(crate) fn completions( + db: &RootDatabase, + config: &CompletionConfig, + position: FilePosition, +) -> Option { + let ctx = CompletionContext::new(db, position, config)?; + + let mut acc = Completions::default(); + complete_attribute::complete_attribute(&mut acc, &ctx); + complete_fn_param::complete_fn_param(&mut acc, &ctx); + complete_keyword::complete_expr_keyword(&mut acc, &ctx); + complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); + complete_snippet::complete_expr_snippet(&mut acc, &ctx); + complete_snippet::complete_item_snippet(&mut acc, &ctx); + complete_qualified_path::complete_qualified_path(&mut acc, &ctx); + complete_unqualified_path::complete_unqualified_path(&mut acc, &ctx); + complete_dot::complete_dot(&mut acc, &ctx); + complete_record::complete_record(&mut acc, &ctx); + complete_pattern::complete_pattern(&mut acc, &ctx); + complete_postfix::complete_postfix(&mut acc, &ctx); + complete_macro_in_item_position::complete_macro_in_item_position(&mut acc, &ctx); + complete_trait_impl::complete_trait_impl(&mut acc, &ctx); + + Some(acc) +} + +#[cfg(test)] +mod tests { + use crate::completion::completion_config::CompletionConfig; + use crate::mock_analysis::analysis_and_position; + + struct DetailAndDocumentation<'a> { + detail: &'a str, + documentation: &'a str, + } + + fn check_detail_and_documentation(ra_fixture: &str, expected: DetailAndDocumentation) { + let (analysis, position) = analysis_and_position(ra_fixture); + let config = CompletionConfig::default(); + let completions = analysis.completions(&config, position).unwrap().unwrap(); + for item in completions { + if item.detail() == Some(expected.detail) { + let opt = item.documentation(); + let doc = opt.as_ref().map(|it| it.as_str()); + assert_eq!(doc, Some(expected.documentation)); + return; + } + } + panic!("completion detail not found: {}", expected.detail) + } + + #[test] + fn test_completion_detail_from_macro_generated_struct_fn_doc_attr() { + check_detail_and_documentation( + 
r#" + //- /lib.rs + macro_rules! bar { + () => { + struct Bar; + impl Bar { + #[doc = "Do the foo"] + fn foo(&self) {} + } + } + } + + bar!(); + + fn foo() { + let bar = Bar; + bar.fo<|>; + } + "#, + DetailAndDocumentation { detail: "fn foo(&self)", documentation: "Do the foo" }, + ); + } + + #[test] + fn test_completion_detail_from_macro_generated_struct_fn_doc_comment() { + check_detail_and_documentation( + r#" + //- /lib.rs + macro_rules! bar { + () => { + struct Bar; + impl Bar { + /// Do the foo + fn foo(&self) {} + } + } + } + + bar!(); + + fn foo() { + let bar = Bar; + bar.fo<|>; + } + "#, + DetailAndDocumentation { detail: "fn foo(&self)", documentation: " Do the foo" }, + ); + } +} diff --git a/crates/ide/src/completion/complete_attribute.rs b/crates/ide/src/completion/complete_attribute.rs new file mode 100644 index 000000000..603d935de --- /dev/null +++ b/crates/ide/src/completion/complete_attribute.rs @@ -0,0 +1,644 @@ +//! Completion for attributes +//! +//! This module uses a bit of static metadata to provide completions +//! for built-in attributes. 
+ +use rustc_hash::FxHashSet; +use syntax::{ast, AstNode, SyntaxKind}; + +use crate::completion::{ + completion_context::CompletionContext, + completion_item::{CompletionItem, CompletionItemKind, CompletionKind, Completions}, +}; + +pub(super) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { + let attribute = ctx.attribute_under_caret.as_ref()?; + match (attribute.path(), attribute.token_tree()) { + (Some(path), Some(token_tree)) if path.to_string() == "derive" => { + complete_derive(acc, ctx, token_tree) + } + (Some(path), Some(token_tree)) + if ["allow", "warn", "deny", "forbid"] + .iter() + .any(|lint_level| lint_level == &path.to_string()) => + { + complete_lint(acc, ctx, token_tree) + } + (_, Some(_token_tree)) => {} + _ => complete_attribute_start(acc, ctx, attribute), + } + Some(()) +} + +fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) { + for attr_completion in ATTRIBUTES { + let mut item = CompletionItem::new( + CompletionKind::Attribute, + ctx.source_range(), + attr_completion.label, + ) + .kind(CompletionItemKind::Attribute); + + if let Some(lookup) = attr_completion.lookup { + item = item.lookup_by(lookup); + } + + match (attr_completion.snippet, ctx.config.snippet_cap) { + (Some(snippet), Some(cap)) => { + item = item.insert_snippet(cap, snippet); + } + _ => {} + } + + if attribute.kind() == ast::AttrKind::Inner || !attr_completion.prefer_inner { + acc.add(item); + } + } +} + +struct AttrCompletion { + label: &'static str, + lookup: Option<&'static str>, + snippet: Option<&'static str>, + prefer_inner: bool, +} + +impl AttrCompletion { + const fn prefer_inner(self) -> AttrCompletion { + AttrCompletion { prefer_inner: true, ..self } + } +} + +const fn attr( + label: &'static str, + lookup: Option<&'static str>, + snippet: Option<&'static str>, +) -> AttrCompletion { + AttrCompletion { label, lookup, snippet, prefer_inner: false } +} + +const ATTRIBUTES: 
&[AttrCompletion] = &[ + attr("allow(…)", Some("allow"), Some("allow(${0:lint})")), + attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")), + attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")), + attr("deny(…)", Some("deny"), Some("deny(${0:lint})")), + attr(r#"deprecated = "…""#, Some("deprecated"), Some(r#"deprecated = "${0:reason}""#)), + attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)), + attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)), + attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(), + attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")), + // FIXME: resolve through macro resolution? + attr("global_allocator", None, None).prefer_inner(), + attr(r#"ignore = "…""#, Some("ignore"), Some(r#"ignore = "${0:reason}""#)), + attr("inline(…)", Some("inline"), Some("inline(${0:lint})")), + attr(r#"link_name = "…""#, Some("link_name"), Some(r#"link_name = "${0:symbol_name}""#)), + attr("link", None, None), + attr("macro_export", None, None), + attr("macro_use", None, None), + attr(r#"must_use = "…""#, Some("must_use"), Some(r#"must_use = "${0:reason}""#)), + attr("no_mangle", None, None), + attr("no_std", None, None).prefer_inner(), + attr("non_exhaustive", None, None), + attr("panic_handler", None, None).prefer_inner(), + attr("path = \"…\"", Some("path"), Some("path =\"${0:path}\"")), + attr("proc_macro", None, None), + attr("proc_macro_attribute", None, None), + attr("proc_macro_derive(…)", Some("proc_macro_derive"), Some("proc_macro_derive(${0:Trait})")), + attr("recursion_limit = …", Some("recursion_limit"), Some("recursion_limit = ${0:128}")) + .prefer_inner(), + attr("repr(…)", Some("repr"), Some("repr(${0:C})")), + attr( + "should_panic(…)", + Some("should_panic"), + Some(r#"should_panic(expected = "${0:reason}")"#), + ), + attr( + r#"target_feature = "…""#, + Some("target_feature"), + Some("target_feature = \"${0:feature}\""), + ), + attr("test", None, 
None), + attr("used", None, None), + attr("warn(…)", Some("warn"), Some("warn(${0:lint})")), + attr( + r#"windows_subsystem = "…""#, + Some("windows_subsystem"), + Some(r#"windows_subsystem = "${0:subsystem}""#), + ) + .prefer_inner(), +]; + +fn complete_derive(acc: &mut Completions, ctx: &CompletionContext, derive_input: ast::TokenTree) { + if let Ok(existing_derives) = parse_comma_sep_input(derive_input) { + for derive_completion in DEFAULT_DERIVE_COMPLETIONS + .into_iter() + .filter(|completion| !existing_derives.contains(completion.label)) + { + let mut label = derive_completion.label.to_owned(); + for dependency in derive_completion + .dependencies + .into_iter() + .filter(|&&dependency| !existing_derives.contains(dependency)) + { + label.push_str(", "); + label.push_str(dependency); + } + acc.add( + CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label) + .kind(CompletionItemKind::Attribute), + ); + } + + for custom_derive_name in get_derive_names_in_scope(ctx).difference(&existing_derives) { + acc.add( + CompletionItem::new( + CompletionKind::Attribute, + ctx.source_range(), + custom_derive_name, + ) + .kind(CompletionItemKind::Attribute), + ); + } + } +} + +fn complete_lint(acc: &mut Completions, ctx: &CompletionContext, derive_input: ast::TokenTree) { + if let Ok(existing_lints) = parse_comma_sep_input(derive_input) { + for lint_completion in DEFAULT_LINT_COMPLETIONS + .into_iter() + .filter(|completion| !existing_lints.contains(completion.label)) + { + acc.add( + CompletionItem::new( + CompletionKind::Attribute, + ctx.source_range(), + lint_completion.label, + ) + .kind(CompletionItemKind::Attribute) + .detail(lint_completion.description), + ); + } + } +} + +fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result, ()> { + match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) { + (Some(left_paren), Some(right_paren)) + if left_paren.kind() == SyntaxKind::L_PAREN + && right_paren.kind() == 
SyntaxKind::R_PAREN => + { + let mut input_derives = FxHashSet::default(); + let mut current_derive = String::new(); + for token in derive_input + .syntax() + .children_with_tokens() + .filter_map(|token| token.into_token()) + .skip_while(|token| token != &left_paren) + .skip(1) + .take_while(|token| token != &right_paren) + { + if SyntaxKind::COMMA == token.kind() { + if !current_derive.is_empty() { + input_derives.insert(current_derive); + current_derive = String::new(); + } + } else { + current_derive.push_str(token.to_string().trim()); + } + } + + if !current_derive.is_empty() { + input_derives.insert(current_derive); + } + Ok(input_derives) + } + _ => Err(()), + } +} + +fn get_derive_names_in_scope(ctx: &CompletionContext) -> FxHashSet { + let mut result = FxHashSet::default(); + ctx.scope.process_all_names(&mut |name, scope_def| { + if let hir::ScopeDef::MacroDef(mac) = scope_def { + if mac.is_derive_macro() { + result.insert(name.to_string()); + } + } + }); + result +} + +struct DeriveCompletion { + label: &'static str, + dependencies: &'static [&'static str], +} + +/// Standard Rust derives and the information about their dependencies +/// (the dependencies are needed so that the main derive don't break the compilation when added) +#[rustfmt::skip] +const DEFAULT_DERIVE_COMPLETIONS: &[DeriveCompletion] = &[ + DeriveCompletion { label: "Clone", dependencies: &[] }, + DeriveCompletion { label: "Copy", dependencies: &["Clone"] }, + DeriveCompletion { label: "Debug", dependencies: &[] }, + DeriveCompletion { label: "Default", dependencies: &[] }, + DeriveCompletion { label: "Hash", dependencies: &[] }, + DeriveCompletion { label: "PartialEq", dependencies: &[] }, + DeriveCompletion { label: "Eq", dependencies: &["PartialEq"] }, + DeriveCompletion { label: "PartialOrd", dependencies: &["PartialEq"] }, + DeriveCompletion { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] }, +]; + +struct LintCompletion { + label: &'static str, + description: 
&'static str, +} + +#[rustfmt::skip] +const DEFAULT_LINT_COMPLETIONS: &[LintCompletion] = &[ + LintCompletion { label: "absolute_paths_not_starting_with_crate", description: r#"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"# }, + LintCompletion { label: "anonymous_parameters", description: r#"detects anonymous parameters"# }, + LintCompletion { label: "box_pointers", description: r#"use of owned (Box type) heap memory"# }, + LintCompletion { label: "deprecated_in_future", description: r#"detects use of items that will be deprecated in a future version"# }, + LintCompletion { label: "elided_lifetimes_in_paths", description: r#"hidden lifetime parameters in types are deprecated"# }, + LintCompletion { label: "explicit_outlives_requirements", description: r#"outlives requirements can be inferred"# }, + LintCompletion { label: "indirect_structural_match", description: r#"pattern with const indirectly referencing non-structural-match type"# }, + LintCompletion { label: "keyword_idents", description: r#"detects edition keywords being used as an identifier"# }, + LintCompletion { label: "macro_use_extern_crate", description: r#"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"# }, + LintCompletion { label: "meta_variable_misuse", description: r#"possible meta-variable misuse at macro definition"# }, + LintCompletion { label: "missing_copy_implementations", description: r#"detects potentially-forgotten implementations of `Copy`"# }, + LintCompletion { label: "missing_crate_level_docs", description: r#"detects crates with no crate-level documentation"# }, + LintCompletion { label: "missing_debug_implementations", description: r#"detects missing implementations of Debug"# }, + LintCompletion { label: "missing_docs", description: r#"detects missing documentation for public members"# }, + LintCompletion { label: "missing_doc_code_examples", description: r#"detects 
publicly-exported items without code samples in their documentation"# }, + LintCompletion { label: "non_ascii_idents", description: r#"detects non-ASCII identifiers"# }, + LintCompletion { label: "private_doc_tests", description: r#"detects code samples in docs of private items not documented by rustdoc"# }, + LintCompletion { label: "single_use_lifetimes", description: r#"detects lifetime parameters that are only used once"# }, + LintCompletion { label: "trivial_casts", description: r#"detects trivial casts which could be removed"# }, + LintCompletion { label: "trivial_numeric_casts", description: r#"detects trivial casts of numeric types which could be removed"# }, + LintCompletion { label: "unaligned_references", description: r#"detects unaligned references to fields of packed structs"# }, + LintCompletion { label: "unreachable_pub", description: r#"`pub` items not reachable from crate root"# }, + LintCompletion { label: "unsafe_code", description: r#"usage of `unsafe` code"# }, + LintCompletion { label: "unsafe_op_in_unsafe_fn", description: r#"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"# }, + LintCompletion { label: "unstable_features", description: r#"enabling unstable features (deprecated. 
do not use)"# }, + LintCompletion { label: "unused_crate_dependencies", description: r#"crate dependencies that are never used"# }, + LintCompletion { label: "unused_extern_crates", description: r#"extern crates that are never used"# }, + LintCompletion { label: "unused_import_braces", description: r#"unnecessary braces around an imported item"# }, + LintCompletion { label: "unused_lifetimes", description: r#"detects lifetime parameters that are never used"# }, + LintCompletion { label: "unused_qualifications", description: r#"detects unnecessarily qualified names"# }, + LintCompletion { label: "unused_results", description: r#"unused result of an expression in a statement"# }, + LintCompletion { label: "variant_size_differences", description: r#"detects enums with widely varying variant sizes"# }, + LintCompletion { label: "array_into_iter", description: r#"detects calling `into_iter` on arrays"# }, + LintCompletion { label: "asm_sub_register", description: r#"using only a subset of a register for inline asm inputs"# }, + LintCompletion { label: "bare_trait_objects", description: r#"suggest using `dyn Trait` for trait objects"# }, + LintCompletion { label: "bindings_with_variant_name", description: r#"detects pattern bindings with the same name as one of the matched variants"# }, + LintCompletion { label: "cenum_impl_drop_cast", description: r#"a C-like enum implementing Drop is cast"# }, + LintCompletion { label: "clashing_extern_declarations", description: r#"detects when an extern fn has been declared with the same name but different types"# }, + LintCompletion { label: "coherence_leak_check", description: r#"distinct impls distinguished only by the leak-check code"# }, + LintCompletion { label: "confusable_idents", description: r#"detects visually confusable pairs between identifiers"# }, + LintCompletion { label: "dead_code", description: r#"detect unused, unexported items"# }, + LintCompletion { label: "deprecated", description: r#"detects use of deprecated 
items"# }, + LintCompletion { label: "ellipsis_inclusive_range_patterns", description: r#"`...` range patterns are deprecated"# }, + LintCompletion { label: "exported_private_dependencies", description: r#"public interface leaks type from a private dependency"# }, + LintCompletion { label: "illegal_floating_point_literal_pattern", description: r#"floating-point literals cannot be used in patterns"# }, + LintCompletion { label: "improper_ctypes", description: r#"proper use of libc types in foreign modules"# }, + LintCompletion { label: "improper_ctypes_definitions", description: r#"proper use of libc types in foreign item definitions"# }, + LintCompletion { label: "incomplete_features", description: r#"incomplete features that may function improperly in some or all cases"# }, + LintCompletion { label: "inline_no_sanitize", description: r#"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"# }, + LintCompletion { label: "intra_doc_link_resolution_failure", description: r#"failures in resolving intra-doc link targets"# }, + LintCompletion { label: "invalid_codeblock_attributes", description: r#"codeblock attribute looks a lot like a known one"# }, + LintCompletion { label: "invalid_value", description: r#"an invalid value is being created (such as a NULL reference)"# }, + LintCompletion { label: "irrefutable_let_patterns", description: r#"detects irrefutable patterns in if-let and while-let statements"# }, + LintCompletion { label: "late_bound_lifetime_arguments", description: r#"detects generic lifetime arguments in path segments with late bound lifetime parameters"# }, + LintCompletion { label: "mixed_script_confusables", description: r#"detects Unicode scripts whose mixed script confusables codepoints are solely used"# }, + LintCompletion { label: "mutable_borrow_reservation_conflict", description: r#"reservation of a two-phased borrow conflicts with other shared borrows"# }, + LintCompletion { label: "non_camel_case_types", description: 
r#"types, variants, traits and type parameters should have camel case names"# }, + LintCompletion { label: "non_shorthand_field_patterns", description: r#"using `Struct { x: x }` instead of `Struct { x }` in a pattern"# }, + LintCompletion { label: "non_snake_case", description: r#"variables, methods, functions, lifetime parameters and modules should have snake case names"# }, + LintCompletion { label: "non_upper_case_globals", description: r#"static constants should have uppercase identifiers"# }, + LintCompletion { label: "no_mangle_generic_items", description: r#"generic items must be mangled"# }, + LintCompletion { label: "overlapping_patterns", description: r#"detects overlapping patterns"# }, + LintCompletion { label: "path_statements", description: r#"path statements with no effect"# }, + LintCompletion { label: "private_in_public", description: r#"detect private items in public interfaces not caught by the old implementation"# }, + LintCompletion { label: "proc_macro_derive_resolution_fallback", description: r#"detects proc macro derives using inaccessible names from parent modules"# }, + LintCompletion { label: "redundant_semicolons", description: r#"detects unnecessary trailing semicolons"# }, + LintCompletion { label: "renamed_and_removed_lints", description: r#"lints that have been renamed or removed"# }, + LintCompletion { label: "safe_packed_borrows", description: r#"safe borrows of fields of packed structs were erroneously allowed"# }, + LintCompletion { label: "stable_features", description: r#"stable features found in `#[feature]` directive"# }, + LintCompletion { label: "trivial_bounds", description: r#"these bounds don't depend on an type parameters"# }, + LintCompletion { label: "type_alias_bounds", description: r#"bounds in type aliases are not enforced"# }, + LintCompletion { label: "tyvar_behind_raw_pointer", description: r#"raw pointer to an inference variable"# }, + LintCompletion { label: "uncommon_codepoints", description: r#"detects 
uncommon Unicode codepoints in identifiers"# }, + LintCompletion { label: "unconditional_recursion", description: r#"functions that cannot return without calling themselves"# }, + LintCompletion { label: "unknown_lints", description: r#"unrecognized lint attribute"# }, + LintCompletion { label: "unnameable_test_items", description: r#"detects an item that cannot be named being marked as `#[test_case]`"# }, + LintCompletion { label: "unreachable_code", description: r#"detects unreachable code paths"# }, + LintCompletion { label: "unreachable_patterns", description: r#"detects unreachable patterns"# }, + LintCompletion { label: "unstable_name_collisions", description: r#"detects name collision with an existing but unstable method"# }, + LintCompletion { label: "unused_allocation", description: r#"detects unnecessary allocations that can be eliminated"# }, + LintCompletion { label: "unused_assignments", description: r#"detect assignments that will never be read"# }, + LintCompletion { label: "unused_attributes", description: r#"detects attributes that were not used by the compiler"# }, + LintCompletion { label: "unused_braces", description: r#"unnecessary braces around an expression"# }, + LintCompletion { label: "unused_comparisons", description: r#"comparisons made useless by limits of the types involved"# }, + LintCompletion { label: "unused_doc_comments", description: r#"detects doc comments that aren't used by rustdoc"# }, + LintCompletion { label: "unused_features", description: r#"unused features found in crate-level `#[feature]` directives"# }, + LintCompletion { label: "unused_imports", description: r#"imports that are never used"# }, + LintCompletion { label: "unused_labels", description: r#"detects labels that are never used"# }, + LintCompletion { label: "unused_macros", description: r#"detects macros that were not used"# }, + LintCompletion { label: "unused_must_use", description: r#"unused result of a type flagged as `#[must_use]`"# }, + LintCompletion { 
label: "unused_mut", description: r#"detect mut variables which don't need to be mutable"# }, + LintCompletion { label: "unused_parens", description: r#"`if`, `match`, `while` and `return` do not need parentheses"# }, + LintCompletion { label: "unused_unsafe", description: r#"unnecessary use of an `unsafe` block"# }, + LintCompletion { label: "unused_variables", description: r#"detect variables which are not used in any way"# }, + LintCompletion { label: "warnings", description: r#"mass-change the level for lints which produce warnings"# }, + LintCompletion { label: "where_clauses_object_safety", description: r#"checks the object safety of where clauses"# }, + LintCompletion { label: "while_true", description: r#"suggest using `loop { }` instead of `while true { }`"# }, + LintCompletion { label: "ambiguous_associated_items", description: r#"ambiguous associated items"# }, + LintCompletion { label: "arithmetic_overflow", description: r#"arithmetic operation overflows"# }, + LintCompletion { label: "conflicting_repr_hints", description: r#"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"# }, + LintCompletion { label: "const_err", description: r#"constant evaluation detected erroneous expression"# }, + LintCompletion { label: "ill_formed_attribute_input", description: r#"ill-formed attribute inputs that were previously accepted and used in practice"# }, + LintCompletion { label: "incomplete_include", description: r#"trailing content in included file"# }, + LintCompletion { label: "invalid_type_param_default", description: r#"type parameter default erroneously allowed in invalid location"# }, + LintCompletion { label: "macro_expanded_macro_exports_accessed_by_absolute_paths", description: r#"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"# }, + LintCompletion { label: "missing_fragment_specifier", description: r#"detects missing fragment specifiers in unused `macro_rules!` 
patterns"# }, + LintCompletion { label: "mutable_transmutes", description: r#"mutating transmuted &mut T from &T may cause undefined behavior"# }, + LintCompletion { label: "no_mangle_const_items", description: r#"const items will not have their symbols exported"# }, + LintCompletion { label: "order_dependent_trait_objects", description: r#"trait-object types were treated as different depending on marker-trait order"# }, + LintCompletion { label: "overflowing_literals", description: r#"literal out of range for its type"# }, + LintCompletion { label: "patterns_in_fns_without_body", description: r#"patterns in functions without body were erroneously allowed"# }, + LintCompletion { label: "pub_use_of_private_extern_crate", description: r#"detect public re-exports of private extern crates"# }, + LintCompletion { label: "soft_unstable", description: r#"a feature gate that doesn't break dependent crates"# }, + LintCompletion { label: "unconditional_panic", description: r#"operation will cause a panic at runtime"# }, + LintCompletion { label: "unknown_crate_types", description: r#"unknown crate type found in `#[crate_type]` directive"# }, +]; + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Attribute); + expect.assert_eq(&actual); + } + + #[test] + fn empty_derive_completion() { + check( + r#" +#[derive(<|>)] +struct Test {} + "#, + expect![[r#" + at Clone + at Copy, Clone + at Debug + at Default + at Eq, PartialEq + at Hash + at Ord, PartialOrd, Eq, PartialEq + at PartialEq + at PartialOrd, PartialEq + "#]], + ); + } + + #[test] + fn empty_lint_completion() { + check( + r#"#[allow(<|>)]"#, + expect![[r#" + at absolute_paths_not_starting_with_crate fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name + at ambiguous_associated_items 
ambiguous associated items + at anonymous_parameters detects anonymous parameters + at arithmetic_overflow arithmetic operation overflows + at array_into_iter detects calling `into_iter` on arrays + at asm_sub_register using only a subset of a register for inline asm inputs + at bare_trait_objects suggest using `dyn Trait` for trait objects + at bindings_with_variant_name detects pattern bindings with the same name as one of the matched variants + at box_pointers use of owned (Box type) heap memory + at cenum_impl_drop_cast a C-like enum implementing Drop is cast + at clashing_extern_declarations detects when an extern fn has been declared with the same name but different types + at coherence_leak_check distinct impls distinguished only by the leak-check code + at conflicting_repr_hints conflicts between `#[repr(..)]` hints that were previously accepted and used in practice + at confusable_idents detects visually confusable pairs between identifiers + at const_err constant evaluation detected erroneous expression + at dead_code detect unused, unexported items + at deprecated detects use of deprecated items + at deprecated_in_future detects use of items that will be deprecated in a future version + at elided_lifetimes_in_paths hidden lifetime parameters in types are deprecated + at ellipsis_inclusive_range_patterns `...` range patterns are deprecated + at explicit_outlives_requirements outlives requirements can be inferred + at exported_private_dependencies public interface leaks type from a private dependency + at ill_formed_attribute_input ill-formed attribute inputs that were previously accepted and used in practice + at illegal_floating_point_literal_pattern floating-point literals cannot be used in patterns + at improper_ctypes proper use of libc types in foreign modules + at improper_ctypes_definitions proper use of libc types in foreign item definitions + at incomplete_features incomplete features that may function improperly in some or all cases + at 
incomplete_include trailing content in included file + at indirect_structural_match pattern with const indirectly referencing non-structural-match type + at inline_no_sanitize detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]` + at intra_doc_link_resolution_failure failures in resolving intra-doc link targets + at invalid_codeblock_attributes codeblock attribute looks a lot like a known one + at invalid_type_param_default type parameter default erroneously allowed in invalid location + at invalid_value an invalid value is being created (such as a NULL reference) + at irrefutable_let_patterns detects irrefutable patterns in if-let and while-let statements + at keyword_idents detects edition keywords being used as an identifier + at late_bound_lifetime_arguments detects generic lifetime arguments in path segments with late bound lifetime parameters + at macro_expanded_macro_exports_accessed_by_absolute_paths macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths + at macro_use_extern_crate the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system + at meta_variable_misuse possible meta-variable misuse at macro definition + at missing_copy_implementations detects potentially-forgotten implementations of `Copy` + at missing_crate_level_docs detects crates with no crate-level documentation + at missing_debug_implementations detects missing implementations of Debug + at missing_doc_code_examples detects publicly-exported items without code samples in their documentation + at missing_docs detects missing documentation for public members + at missing_fragment_specifier detects missing fragment specifiers in unused `macro_rules!` patterns + at mixed_script_confusables detects Unicode scripts whose mixed script confusables codepoints are solely used + at mutable_borrow_reservation_conflict reservation of a two-phased borrow conflicts with other shared borrows + at 
mutable_transmutes mutating transmuted &mut T from &T may cause undefined behavior + at no_mangle_const_items const items will not have their symbols exported + at no_mangle_generic_items generic items must be mangled + at non_ascii_idents detects non-ASCII identifiers + at non_camel_case_types types, variants, traits and type parameters should have camel case names + at non_shorthand_field_patterns using `Struct { x: x }` instead of `Struct { x }` in a pattern + at non_snake_case variables, methods, functions, lifetime parameters and modules should have snake case names + at non_upper_case_globals static constants should have uppercase identifiers + at order_dependent_trait_objects trait-object types were treated as different depending on marker-trait order + at overflowing_literals literal out of range for its type + at overlapping_patterns detects overlapping patterns + at path_statements path statements with no effect + at patterns_in_fns_without_body patterns in functions without body were erroneously allowed + at private_doc_tests detects code samples in docs of private items not documented by rustdoc + at private_in_public detect private items in public interfaces not caught by the old implementation + at proc_macro_derive_resolution_fallback detects proc macro derives using inaccessible names from parent modules + at pub_use_of_private_extern_crate detect public re-exports of private extern crates + at redundant_semicolons detects unnecessary trailing semicolons + at renamed_and_removed_lints lints that have been renamed or removed + at safe_packed_borrows safe borrows of fields of packed structs were erroneously allowed + at single_use_lifetimes detects lifetime parameters that are only used once + at soft_unstable a feature gate that doesn't break dependent crates + at stable_features stable features found in `#[feature]` directive + at trivial_bounds these bounds don't depend on an type parameters + at trivial_casts detects trivial casts which could be 
removed + at trivial_numeric_casts detects trivial casts of numeric types which could be removed + at type_alias_bounds bounds in type aliases are not enforced + at tyvar_behind_raw_pointer raw pointer to an inference variable + at unaligned_references detects unaligned references to fields of packed structs + at uncommon_codepoints detects uncommon Unicode codepoints in identifiers + at unconditional_panic operation will cause a panic at runtime + at unconditional_recursion functions that cannot return without calling themselves + at unknown_crate_types unknown crate type found in `#[crate_type]` directive + at unknown_lints unrecognized lint attribute + at unnameable_test_items detects an item that cannot be named being marked as `#[test_case]` + at unreachable_code detects unreachable code paths + at unreachable_patterns detects unreachable patterns + at unreachable_pub `pub` items not reachable from crate root + at unsafe_code usage of `unsafe` code + at unsafe_op_in_unsafe_fn unsafe operations in unsafe functions without an explicit unsafe block are deprecated + at unstable_features enabling unstable features (deprecated. 
do not use) + at unstable_name_collisions detects name collision with an existing but unstable method + at unused_allocation detects unnecessary allocations that can be eliminated + at unused_assignments detect assignments that will never be read + at unused_attributes detects attributes that were not used by the compiler + at unused_braces unnecessary braces around an expression + at unused_comparisons comparisons made useless by limits of the types involved + at unused_crate_dependencies crate dependencies that are never used + at unused_doc_comments detects doc comments that aren't used by rustdoc + at unused_extern_crates extern crates that are never used + at unused_features unused features found in crate-level `#[feature]` directives + at unused_import_braces unnecessary braces around an imported item + at unused_imports imports that are never used + at unused_labels detects labels that are never used + at unused_lifetimes detects lifetime parameters that are never used + at unused_macros detects macros that were not used + at unused_must_use unused result of a type flagged as `#[must_use]` + at unused_mut detect mut variables which don't need to be mutable + at unused_parens `if`, `match`, `while` and `return` do not need parentheses + at unused_qualifications detects unnecessarily qualified names + at unused_results unused result of an expression in a statement + at unused_unsafe unnecessary use of an `unsafe` block + at unused_variables detect variables which are not used in any way + at variant_size_differences detects enums with widely varying variant sizes + at warnings mass-change the level for lints which produce warnings + at where_clauses_object_safety checks the object safety of where clauses + at while_true suggest using `loop { }` instead of `while true { }` + "#]], + ) + } + + #[test] + fn no_completion_for_incorrect_derive() { + check( + r#" +#[derive{<|>)] +struct Test {} +"#, + expect![[r#""#]], + ) + } + + #[test] + fn 
derive_with_input_completion() { + check( + r#" +#[derive(serde::Serialize, PartialEq, <|>)] +struct Test {} +"#, + expect![[r#" + at Clone + at Copy, Clone + at Debug + at Default + at Eq + at Hash + at Ord, PartialOrd, Eq + at PartialOrd + "#]], + ) + } + + #[test] + fn test_attribute_completion() { + check( + r#"#[<|>]"#, + expect![[r#" + at allow(…) + at cfg(…) + at cfg_attr(…) + at deny(…) + at deprecated = "…" + at derive(…) + at doc = "…" + at forbid(…) + at ignore = "…" + at inline(…) + at link + at link_name = "…" + at macro_export + at macro_use + at must_use = "…" + at no_mangle + at non_exhaustive + at path = "…" + at proc_macro + at proc_macro_attribute + at proc_macro_derive(…) + at repr(…) + at should_panic(…) + at target_feature = "…" + at test + at used + at warn(…) + "#]], + ) + } + + #[test] + fn test_attribute_completion_inside_nested_attr() { + check(r#"#[cfg(<|>)]"#, expect![[]]) + } + + #[test] + fn test_inner_attribute_completion() { + check( + r"#![<|>]", + expect![[r#" + at allow(…) + at cfg(…) + at cfg_attr(…) + at deny(…) + at deprecated = "…" + at derive(…) + at doc = "…" + at feature(…) + at forbid(…) + at global_allocator + at ignore = "…" + at inline(…) + at link + at link_name = "…" + at macro_export + at macro_use + at must_use = "…" + at no_mangle + at no_std + at non_exhaustive + at panic_handler + at path = "…" + at proc_macro + at proc_macro_attribute + at proc_macro_derive(…) + at recursion_limit = … + at repr(…) + at should_panic(…) + at target_feature = "…" + at test + at used + at warn(…) + at windows_subsystem = "…" + "#]], + ); + } +} diff --git a/crates/ide/src/completion/complete_dot.rs b/crates/ide/src/completion/complete_dot.rs new file mode 100644 index 000000000..532665285 --- /dev/null +++ b/crates/ide/src/completion/complete_dot.rs @@ -0,0 +1,416 @@ +//! Completes references after dot (fields and method calls). 
+ +use hir::{HasVisibility, Type}; +use rustc_hash::FxHashSet; +use test_utils::mark; + +use crate::completion::{completion_context::CompletionContext, completion_item::Completions}; + +/// Complete dot accesses, i.e. fields or methods. +pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { + let dot_receiver = match &ctx.dot_receiver { + Some(expr) => expr, + _ => return, + }; + + let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { + Some(ty) => ty, + _ => return, + }; + + if ctx.is_call { + mark::hit!(test_no_struct_field_completion_for_method_call); + } else { + complete_fields(acc, ctx, &receiver_ty); + } + complete_methods(acc, ctx, &receiver_ty); +} + +fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { + for receiver in receiver.autoderef(ctx.db) { + for (field, ty) in receiver.fields(ctx.db) { + if ctx.scope.module().map_or(false, |m| !field.is_visible_from(ctx.db, m)) { + // Skip private field. FIXME: If the definition location of the + // field is editable, we should show the completion + continue; + } + acc.add_field(ctx, field, &ty); + } + for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() { + // FIXME: Handle visibility + acc.add_tuple_field(ctx, i, &ty); + } + } +} + +fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { + if let Some(krate) = ctx.krate { + let mut seen_methods = FxHashSet::default(); + let traits_in_scope = ctx.scope.traits_in_scope(); + receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { + if func.has_self_param(ctx.db) + && ctx.scope.module().map_or(true, |m| func.is_visible_from(ctx.db, m)) + && seen_methods.insert(func.name(ctx.db)) + { + acc.add_function(ctx, func, None); + } + None::<()> + }); + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn 
check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Reference); + expect.assert_eq(&actual); + } + + #[test] + fn test_struct_field_and_method_completion() { + check( + r#" +struct S { foo: u32 } +impl S { + fn bar(&self) {} +} +fn foo(s: S) { s.<|> } +"#, + expect![[r#" + me bar() fn bar(&self) + fd foo u32 + "#]], + ); + } + + #[test] + fn test_struct_field_completion_self() { + check( + r#" +struct S { the_field: (u32,) } +impl S { + fn foo(self) { self.<|> } +} +"#, + expect![[r#" + me foo() fn foo(self) + fd the_field (u32,) + "#]], + ) + } + + #[test] + fn test_struct_field_completion_autoderef() { + check( + r#" +struct A { the_field: (u32, i32) } +impl A { + fn foo(&self) { self.<|> } +} +"#, + expect![[r#" + me foo() fn foo(&self) + fd the_field (u32, i32) + "#]], + ) + } + + #[test] + fn test_no_struct_field_completion_for_method_call() { + mark::check!(test_no_struct_field_completion_for_method_call); + check( + r#" +struct A { the_field: u32 } +fn foo(a: A) { a.<|>() } +"#, + expect![[""]], + ); + } + + #[test] + fn test_visibility_filtering() { + check( + r#" +mod inner { + pub struct A { + private_field: u32, + pub pub_field: u32, + pub(crate) crate_field: u32, + pub(super) super_field: u32, + } +} +fn foo(a: inner::A) { a.<|> } +"#, + expect![[r#" + fd crate_field u32 + fd pub_field u32 + fd super_field u32 + "#]], + ); + + check( + r#" +struct A {} +mod m { + impl super::A { + fn private_method(&self) {} + pub(super) fn the_method(&self) {} + } +} +fn foo(a: A) { a.<|> } +"#, + expect![[r#" + me the_method() pub(super) fn the_method(&self) + "#]], + ); + } + + #[test] + fn test_union_field_completion() { + check( + r#" +union U { field: u8, other: u16 } +fn foo(u: U) { u.<|> } +"#, + expect![[r#" + fd field u8 + fd other u16 + "#]], + ); + } + + #[test] + fn test_method_completion_only_fitting_impls() { + check( + r#" +struct A {} +impl A { + fn the_method(&self) {} +} +impl A { + fn 
the_other_method(&self) {} +} +fn foo(a: A) { a.<|> } +"#, + expect![[r#" + me the_method() fn the_method(&self) + "#]], + ) + } + + #[test] + fn test_trait_method_completion() { + check( + r#" +struct A {} +trait Trait { fn the_method(&self); } +impl Trait for A {} +fn foo(a: A) { a.<|> } +"#, + expect![[r#" + me the_method() fn the_method(&self) + "#]], + ); + } + + #[test] + fn test_trait_method_completion_deduplicated() { + check( + r" +struct A {} +trait Trait { fn the_method(&self); } +impl Trait for T {} +fn foo(a: &A) { a.<|> } +", + expect![[r#" + me the_method() fn the_method(&self) + "#]], + ); + } + + #[test] + fn completes_trait_method_from_other_module() { + check( + r" +struct A {} +mod m { + pub trait Trait { fn the_method(&self); } +} +use m::Trait; +impl Trait for A {} +fn foo(a: A) { a.<|> } +", + expect![[r#" + me the_method() fn the_method(&self) + "#]], + ); + } + + #[test] + fn test_no_non_self_method() { + check( + r#" +struct A {} +impl A { + fn the_method() {} +} +fn foo(a: A) { + a.<|> +} +"#, + expect![[""]], + ); + } + + #[test] + fn test_tuple_field_completion() { + check( + r#" +fn foo() { + let b = (0, 3.14); + b.<|> +} +"#, + expect![[r#" + fd 0 i32 + fd 1 f64 + "#]], + ) + } + + #[test] + fn test_tuple_field_inference() { + check( + r#" +pub struct S; +impl S { pub fn blah(&self) {} } + +struct T(S); + +impl T { + fn foo(&self) { + // FIXME: This doesn't work without the trailing `a` as `0.` is a float + self.0.a<|> + } +} +"#, + expect![[r#" + me blah() pub fn blah(&self) + "#]], + ); + } + + #[test] + fn test_completion_works_in_consts() { + check( + r#" +struct A { the_field: u32 } +const X: u32 = { + A { the_field: 92 }.<|> +}; +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn works_in_simple_macro_1() { + check( + r#" +macro_rules! 
m { ($e:expr) => { $e } } +struct A { the_field: u32 } +fn foo(a: A) { + m!(a.x<|>) +} +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn works_in_simple_macro_2() { + // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery + check( + r#" +macro_rules! m { ($e:expr) => { $e } } +struct A { the_field: u32 } +fn foo(a: A) { + m!(a.<|>) +} +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn works_in_simple_macro_recursive_1() { + check( + r#" +macro_rules! m { ($e:expr) => { $e } } +struct A { the_field: u32 } +fn foo(a: A) { + m!(m!(m!(a.x<|>))) +} +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn macro_expansion_resilient() { + check( + r#" +macro_rules! dbg { + () => {}; + ($val:expr) => { + match $val { tmp => { tmp } } + }; + // Trailing comma with single argument is ignored + ($val:expr,) => { $crate::dbg!($val) }; + ($($val:expr),+ $(,)?) => { + ($($crate::dbg!($val)),+,) + }; +} +struct A { the_field: u32 } +fn foo(a: A) { + dbg!(a.<|>) +} +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn test_method_completion_issue_3547() { + check( + r#" +struct HashSet {} +impl HashSet { + pub fn the_method(&self) {} +} +fn foo() { + let s: HashSet<_>; + s.<|> +} +"#, + expect![[r#" + me the_method() pub fn the_method(&self) + "#]], + ); + } +} diff --git a/crates/ide/src/completion/complete_fn_param.rs b/crates/ide/src/completion/complete_fn_param.rs new file mode 100644 index 000000000..7c63ce58f --- /dev/null +++ b/crates/ide/src/completion/complete_fn_param.rs @@ -0,0 +1,135 @@ +//! See `complete_fn_param`. + +use rustc_hash::FxHashMap; +use syntax::{ + ast::{self, ModuleItemOwner}, + match_ast, AstNode, +}; + +use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions}; + +/// Complete repeated parameters, both name and type. 
For example, if all +/// functions in a file have a `spam: &mut Spam` parameter, a completion with +/// `spam: &mut Spam` insert text/label and `spam` lookup string will be +/// suggested. +pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_param { + return; + } + + let mut params = FxHashMap::default(); + + let me = ctx.token.ancestors().find_map(ast::Fn::cast); + let mut process_fn = |func: ast::Fn| { + if Some(&func) == me.as_ref() { + return; + } + func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| { + let text = param.syntax().text().to_string(); + params.entry(text).or_insert(param); + }) + }; + + for node in ctx.token.parent().ancestors() { + match_ast! { + match node { + ast::SourceFile(it) => it.items().filter_map(|item| match item { + ast::Item::Fn(it) => Some(it), + _ => None, + }).for_each(&mut process_fn), + ast::ItemList(it) => it.items().filter_map(|item| match item { + ast::Item::Fn(it) => Some(it), + _ => None, + }).for_each(&mut process_fn), + ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item { + ast::AssocItem::Fn(it) => Some(it), + _ => None, + }).for_each(&mut process_fn), + _ => continue, + } + }; + } + + params + .into_iter() + .filter_map(|(label, param)| { + let lookup = param.pat()?.syntax().text().to_string(); + Some((label, lookup)) + }) + .for_each(|(label, lookup)| { + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) + .kind(crate::CompletionItemKind::Binding) + .lookup_by(lookup) + .add_to(acc) + }); +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Magic); + expect.assert_eq(&actual); + } + + #[test] + fn test_param_completion_last_param() { + check( + r#" +fn foo(file_id: FileId) {} +fn bar(file_id: FileId) {} +fn baz(file<|>) {} 
+"#, + expect![[r#" + bn file_id: FileId + "#]], + ); + } + + #[test] + fn test_param_completion_nth_param() { + check( + r#" +fn foo(file_id: FileId) {} +fn baz(file<|>, x: i32) {} +"#, + expect![[r#" + bn file_id: FileId + "#]], + ); + } + + #[test] + fn test_param_completion_trait_param() { + check( + r#" +pub(crate) trait SourceRoot { + pub fn contains(&self, file_id: FileId) -> bool; + pub fn module_map(&self) -> &ModuleMap; + pub fn lines(&self, file_id: FileId) -> &LineIndex; + pub fn syntax(&self, file<|>) +} +"#, + expect![[r#" + bn file_id: FileId + "#]], + ); + } + + #[test] + fn completes_param_in_inner_function() { + check( + r#" +fn outer(text: String) { + fn inner(<|>) +} +"#, + expect![[r#" + bn text: String + "#]], + ) + } +} diff --git a/crates/ide/src/completion/complete_keyword.rs b/crates/ide/src/completion/complete_keyword.rs new file mode 100644 index 000000000..a80708935 --- /dev/null +++ b/crates/ide/src/completion/complete_keyword.rs @@ -0,0 +1,536 @@ +//! FIXME: write short doc here + +use syntax::{ast, SyntaxKind}; +use test_utils::mark; + +use crate::completion::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, +}; + +pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { + // complete keyword "crate" in use stmt + let source_range = ctx.source_range(); + match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { + (Some(_), None) => { + CompletionItem::new(CompletionKind::Keyword, source_range, "crate::") + .kind(CompletionItemKind::Keyword) + .insert_text("crate::") + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "self") + .kind(CompletionItemKind::Keyword) + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "super::") + .kind(CompletionItemKind::Keyword) + .insert_text("super::") + .add_to(acc); + } + (Some(_), Some(_)) => { + CompletionItem::new(CompletionKind::Keyword, source_range, "self") + 
.kind(CompletionItemKind::Keyword) + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "super::") + .kind(CompletionItemKind::Keyword) + .insert_text("super::") + .add_to(acc); + } + _ => {} + } + + // Suggest .await syntax for types that implement Future trait + if let Some(receiver) = &ctx.dot_receiver { + if let Some(ty) = ctx.sema.type_of_expr(receiver) { + if ty.impls_future(ctx.db) { + CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await") + .kind(CompletionItemKind::Keyword) + .detail("expr.await") + .insert_text("await") + .add_to(acc); + } + }; + } +} + +pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { + if ctx.token.kind() == SyntaxKind::COMMENT { + mark::hit!(no_keyword_completion_in_comments); + return; + } + + let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; + if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { + add_keyword(ctx, acc, "where", "where "); + return; + } + if ctx.unsafe_is_prev { + if ctx.has_item_list_or_source_file_parent || ctx.block_expr_parent { + add_keyword(ctx, acc, "fn", "fn $0() {}") + } + + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "trait", "trait $0 {}"); + add_keyword(ctx, acc, "impl", "impl $0 {}"); + } + + return; + } + if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent + { + add_keyword(ctx, acc, "fn", "fn $0() {}"); + } + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "use", "use "); + add_keyword(ctx, acc, "impl", "impl $0 {}"); + add_keyword(ctx, acc, "trait", "trait $0 {}"); + } + + if ctx.has_item_list_or_source_file_parent { + add_keyword(ctx, acc, "enum", "enum $0 {}"); + add_keyword(ctx, acc, "struct", "struct $0"); + add_keyword(ctx, acc, "union", "union $0 {}"); + } + + if ctx.is_expr { + add_keyword(ctx, acc, "match", "match $0 {}"); + add_keyword(ctx, 
acc, "while", "while $0 {}"); + add_keyword(ctx, acc, "loop", "loop {$0}"); + add_keyword(ctx, acc, "if", "if "); + add_keyword(ctx, acc, "if let", "if let "); + } + + if ctx.if_is_prev || ctx.block_expr_parent { + add_keyword(ctx, acc, "let", "let "); + } + + if ctx.after_if { + add_keyword(ctx, acc, "else", "else {$0}"); + add_keyword(ctx, acc, "else if", "else if $0 {}"); + } + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "mod", "mod $0 {}"); + } + if ctx.bind_pat_parent || ctx.ref_pat_parent { + add_keyword(ctx, acc, "mut", "mut "); + } + if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent + { + add_keyword(ctx, acc, "const", "const "); + add_keyword(ctx, acc, "type", "type "); + } + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "static", "static "); + }; + if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { + add_keyword(ctx, acc, "extern", "extern "); + } + if ctx.has_item_list_or_source_file_parent + || has_trait_or_impl_parent + || ctx.block_expr_parent + || ctx.is_match_arm + { + add_keyword(ctx, acc, "unsafe", "unsafe "); + } + if ctx.in_loop_body { + if ctx.can_be_stmt { + add_keyword(ctx, acc, "continue", "continue;"); + add_keyword(ctx, acc, "break", "break;"); + } else { + add_keyword(ctx, acc, "continue", "continue"); + add_keyword(ctx, acc, "break", "break"); + } + } + if ctx.has_item_list_or_source_file_parent || ctx.has_impl_parent { + add_keyword(ctx, acc, "pub", "pub ") + } + + if !ctx.is_trivial_path { + return; + } + let fn_def = match &ctx.function_syntax { + Some(it) => it, + None => return, + }; + acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt)); +} + +fn keyword(ctx: &CompletionContext, kw: &str, snippet: &str) -> CompletionItem { + let res = CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), kw) + .kind(CompletionItemKind::Keyword); + + match 
ctx.config.snippet_cap { + Some(cap) => res.insert_snippet(cap, snippet), + _ => res.insert_text(if snippet.contains('$') { kw } else { snippet }), + } + .build() +} + +fn add_keyword(ctx: &CompletionContext, acc: &mut Completions, kw: &str, snippet: &str) { + acc.add(keyword(ctx, kw, snippet)); +} + +fn complete_return( + ctx: &CompletionContext, + fn_def: &ast::Fn, + can_be_stmt: bool, +) -> Option { + let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { + (true, true) => "return $0;", + (true, false) => "return;", + (false, true) => "return $0", + (false, false) => "return", + }; + Some(keyword(ctx, "return", snip)) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + use test_utils::mark; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Keyword); + expect.assert_eq(&actual) + } + + #[test] + fn test_keywords_in_use_stmt() { + check( + r"use <|>", + expect![[r#" + kw crate:: + kw self + kw super:: + "#]], + ); + + check( + r"use a::<|>", + expect![[r#" + kw self + kw super:: + "#]], + ); + + check( + r"use a::{b, <|>}", + expect![[r#" + kw self + kw super:: + "#]], + ); + } + + #[test] + fn test_keywords_at_source_file_level() { + check( + r"m<|>", + expect![[r#" + kw const + kw enum + kw extern + kw fn + kw impl + kw mod + kw pub + kw static + kw struct + kw trait + kw type + kw union + kw unsafe + kw use + "#]], + ); + } + + #[test] + fn test_keywords_in_function() { + check( + r"fn quux() { <|> }", + expect![[r#" + kw const + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + } + + #[test] + fn test_keywords_inside_block() { + check( + r"fn quux() { if true { <|> } }", + expect![[r#" + kw const + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw 
loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + } + + #[test] + fn test_keywords_after_if() { + check( + r#"fn quux() { if true { () } <|> }"#, + expect![[r#" + kw const + kw else + kw else if + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + check_edit( + "else", + r#"fn quux() { if true { () } <|> }"#, + r#"fn quux() { if true { () } else {$0} }"#, + ); + } + + #[test] + fn test_keywords_in_match_arm() { + check( + r#" +fn quux() -> i32 { + match () { () => <|> } +} +"#, + expect![[r#" + kw if + kw if let + kw loop + kw match + kw return + kw unsafe + kw while + "#]], + ); + } + + #[test] + fn test_keywords_in_trait_def() { + check( + r"trait My { <|> }", + expect![[r#" + kw const + kw fn + kw type + kw unsafe + "#]], + ); + } + + #[test] + fn test_keywords_in_impl_def() { + check( + r"impl My { <|> }", + expect![[r#" + kw const + kw fn + kw pub + kw type + kw unsafe + "#]], + ); + } + + #[test] + fn test_keywords_in_loop() { + check( + r"fn my() { loop { <|> } }", + expect![[r#" + kw break + kw const + kw continue + kw extern + kw fn + kw if + kw if let + kw impl + kw let + kw loop + kw match + kw mod + kw return + kw static + kw trait + kw type + kw unsafe + kw use + kw while + "#]], + ); + } + + #[test] + fn test_keywords_after_unsafe_in_item_list() { + check( + r"unsafe <|>", + expect![[r#" + kw fn + kw impl + kw trait + "#]], + ); + } + + #[test] + fn test_keywords_after_unsafe_in_block_expr() { + check( + r"fn my_fn() { unsafe <|> }", + expect![[r#" + kw fn + kw impl + kw trait + "#]], + ); + } + + #[test] + fn test_mut_in_ref_and_in_fn_parameters_list() { + check( + r"fn my_fn(&<|>) {}", + expect![[r#" + kw mut + "#]], + ); + check( + r"fn my_fn(<|>) {}", + expect![[r#" + kw mut + "#]], + ); + check( + r"fn my_fn() { let &<|> }", + expect![[r#" + kw mut + 
"#]], + ); + } + + #[test] + fn test_where_keyword() { + check( + r"trait A <|>", + expect![[r#" + kw where + "#]], + ); + check( + r"impl A <|>", + expect![[r#" + kw where + "#]], + ); + } + + #[test] + fn no_keyword_completion_in_comments() { + mark::check!(no_keyword_completion_in_comments); + check( + r#" +fn test() { + let x = 2; // A comment<|> +} +"#, + expect![[""]], + ); + check( + r#" +/* +Some multi-line comment<|> +*/ +"#, + expect![[""]], + ); + check( + r#" +/// Some doc comment +/// let test<|> = 1 +"#, + expect![[""]], + ); + } + + #[test] + fn test_completion_await_impls_future() { + check( + r#" +//- /main.rs +use std::future::*; +struct A {} +impl Future for A {} +fn foo(a: A) { a.<|> } + +//- /std/lib.rs +pub mod future { + #[lang = "future_trait"] + pub trait Future {} +} +"#, + expect![[r#" + kw await expr.await + "#]], + ) + } + + #[test] + fn after_let() { + check( + r#"fn main() { let _ = <|> }"#, + expect![[r#" + kw if + kw if let + kw loop + kw match + kw return + kw while + "#]], + ) + } +} diff --git a/crates/ide/src/completion/complete_macro_in_item_position.rs b/crates/ide/src/completion/complete_macro_in_item_position.rs new file mode 100644 index 000000000..0447f0511 --- /dev/null +++ b/crates/ide/src/completion/complete_macro_in_item_position.rs @@ -0,0 +1,41 @@ +//! FIXME: write short doc here + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { + // Show only macros in top level. 
/// Completes constants and paths in patterns.
///
/// Offers names that make sense on the left-hand side of a `match` arm or in
/// a `let` pattern: unit/record structs, enums and their variants, consts,
/// modules (as path prefixes), and macros.
pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
    // Only complete in binding/const position.
    if !ctx.is_pat_binding_or_const {
        return;
    }
    // Record-pattern fields are handled by `complete_record`, not here.
    if ctx.record_pat_syntax.is_some() {
        return;
    }

    // FIXME: ideally, we should look at the type we are matching against and
    // suggest variants + auto-imports
    ctx.scope.process_all_names(&mut |name, res| {
        // Filter to the kinds of definitions that are legal in a pattern;
        // everything else (functions, statics, locals, ...) is skipped.
        match &res {
            hir::ScopeDef::ModuleDef(def) => match def {
                hir::ModuleDef::Adt(hir::Adt::Enum(..))
                | hir::ModuleDef::Adt(hir::Adt::Struct(..))
                | hir::ModuleDef::EnumVariant(..)
                | hir::ModuleDef::Const(..)
                | hir::ModuleDef::Module(..) => (),
                _ => return,
            },
            hir::ScopeDef::MacroDef(_) => (),
            _ => return,
        };

        acc.add_resolution(ctx, name.to_string(), &res)
    });
}
/// Completes postfix snippets on a dot-receiver expression, e.g. `expr.if`,
/// `expr.match`, `expr.dbg`. The set offered depends on the receiver's type:
/// `Result`/`Option` receivers get `if let`/`while let`/tailored `match`
/// arms; `bool` (or unknown) receivers get `if`/`while`/`not`.
///
/// NOTE(review): the inner indentation of the multi-line snippet strings was
/// collapsed by extraction; it is reconstructed here as four spaces — confirm
/// against the expected outputs in the tests below.
pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
    if !ctx.config.enable_postfix_completions {
        return;
    }

    // All postfix completions require a receiver expression before the dot.
    let dot_receiver = match &ctx.dot_receiver {
        Some(it) => it,
        None => return,
    };

    let receiver_text =
        get_receiver_text(dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal);

    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
        Some(it) => it,
        None => return,
    };

    // Postfix completions are snippets; bail if the client can't handle them.
    let cap = match ctx.config.snippet_cap {
        Some(it) => it,
        None => return,
    };
    let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty);
    if let Some(try_enum) = &try_enum {
        // Receiver is Result/Option: offer `if let` / `while let` on the
        // "success" variant.
        match try_enum {
            TryEnum::Result => {
                postfix_snippet(
                    ctx,
                    cap,
                    &dot_receiver,
                    "ifl",
                    "if let Ok {}",
                    &format!("if let Ok($1) = {} {{\n    $0\n}}", receiver_text),
                )
                .add_to(acc);

                postfix_snippet(
                    ctx,
                    cap,
                    &dot_receiver,
                    "while",
                    "while let Ok {}",
                    &format!("while let Ok($1) = {} {{\n    $0\n}}", receiver_text),
                )
                .add_to(acc);
            }
            TryEnum::Option => {
                postfix_snippet(
                    ctx,
                    cap,
                    &dot_receiver,
                    "ifl",
                    "if let Some {}",
                    &format!("if let Some($1) = {} {{\n    $0\n}}", receiver_text),
                )
                .add_to(acc);

                postfix_snippet(
                    ctx,
                    cap,
                    &dot_receiver,
                    "while",
                    "while let Some {}",
                    &format!("while let Some($1) = {} {{\n    $0\n}}", receiver_text),
                )
                .add_to(acc);
            }
        }
    } else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
        // Boolean-ish receiver: offer condition-shaped snippets.
        postfix_snippet(
            ctx,
            cap,
            &dot_receiver,
            "if",
            "if expr {}",
            &format!("if {} {{\n    $0\n}}", receiver_text),
        )
        .add_to(acc);
        postfix_snippet(
            ctx,
            cap,
            &dot_receiver,
            "while",
            "while expr {}",
            &format!("while {} {{\n    $0\n}}", receiver_text),
        )
        .add_to(acc);
        postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
            .add_to(acc);
    }

    // Borrowing snippets apply to any receiver type.
    postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
        .add_to(acc);
    postfix_snippet(
        ctx,
        cap,
        &dot_receiver,
        "refm",
        "&mut expr",
        &format!("&mut {}", receiver_text),
    )
    .add_to(acc);

    // The rest of the postfix completions create an expression that moves an argument,
    // so it's better to consider references now to avoid breaking the compilation
    let dot_receiver = include_references(dot_receiver);
    let receiver_text =
        get_receiver_text(&dot_receiver, ctx.dot_receiver_is_ambiguous_float_literal);

    match try_enum {
        Some(try_enum) => match try_enum {
            // Result/Option get a `match` pre-populated with their variants.
            TryEnum::Result => {
                postfix_snippet(
                    ctx,
                    cap,
                    &dot_receiver,
                    "match",
                    "match expr {}",
                    &format!("match {} {{\n    Ok(${{1:_}}) => {{$2}},\n    Err(${{3:_}}) => {{$0}},\n}}", receiver_text),
                )
                .add_to(acc);
            }
            TryEnum::Option => {
                postfix_snippet(
                    ctx,
                    cap,
                    &dot_receiver,
                    "match",
                    "match expr {}",
                    &format!(
                        "match {} {{\n    Some(${{1:_}}) => {{$2}},\n    None => {{$0}},\n}}",
                        receiver_text
                    ),
                )
                .add_to(acc);
            }
        },
        None => {
            // Generic `match` with a single placeholder arm.
            postfix_snippet(
                ctx,
                cap,
                &dot_receiver,
                "match",
                "match expr {}",
                &format!("match {} {{\n    ${{1:_}} => {{$0}},\n}}", receiver_text),
            )
            .add_to(acc);
        }
    }

    postfix_snippet(
        ctx,
        cap,
        &dot_receiver,
        "box",
        "Box::new(expr)",
        &format!("Box::new({})", receiver_text),
    )
    .add_to(acc);

    postfix_snippet(
        ctx,
        cap,
        &dot_receiver,
        "dbg",
        "dbg!(expr)",
        &format!("dbg!({})", receiver_text),
    )
    .add_to(acc);

    postfix_snippet(
        ctx,
        cap,
        &dot_receiver,
        "call",
        "function(expr)",
        &format!("${{1}}({})", receiver_text),
    )
    .add_to(acc);
}
+ if receiver_is_ambiguous_float_literal { + let text = receiver.syntax().text(); + let without_dot = ..text.len() - TextSize::of('.'); + text.slice(without_dot).to_string() + } else { + receiver.to_string() + } +} + +fn include_references(initial_element: &ast::Expr) -> ast::Expr { + let mut resulting_element = initial_element.clone(); + while let Some(parent_ref_element) = + resulting_element.syntax().parent().and_then(ast::RefExpr::cast) + { + resulting_element = ast::Expr::from(parent_ref_element); + } + resulting_element +} + +fn postfix_snippet( + ctx: &CompletionContext, + cap: SnippetCap, + receiver: &ast::Expr, + label: &str, + detail: &str, + snippet: &str, +) -> Builder { + let edit = { + let receiver_syntax = receiver.syntax(); + let receiver_range = ctx.sema.original_range(receiver_syntax).range; + let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end()); + TextEdit::replace(delete_range, snippet.to_string()) + }; + CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label) + .detail(detail) + .kind(CompletionItemKind::Snippet) + .snippet_edit(cap, edit) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Postfix); + expect.assert_eq(&actual) + } + + #[test] + fn postfix_completion_works_for_trivial_path_expression() { + check( + r#" +fn main() { + let bar = true; + bar.<|> +} +"#, + expect![[r#" + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn if if expr {} + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn while while expr {} + "#]], + ); + } + + #[test] + fn postfix_type_filtering() { + check( + r#" +fn main() { + let bar: u8 = 12; + bar.<|> +} +"#, + expect![[r#" + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn match 
match expr {} + sn ref &expr + sn refm &mut expr + "#]], + ) + } + + #[test] + fn option_iflet() { + check_edit( + "ifl", + r#" +enum Option { Some(T), None } + +fn main() { + let bar = Option::Some(true); + bar.<|> +} +"#, + r#" +enum Option { Some(T), None } + +fn main() { + let bar = Option::Some(true); + if let Some($1) = bar { + $0 +} +} +"#, + ); + } + + #[test] + fn result_match() { + check_edit( + "match", + r#" +enum Result { Ok(T), Err(E) } + +fn main() { + let bar = Result::Ok(true); + bar.<|> +} +"#, + r#" +enum Result { Ok(T), Err(E) } + +fn main() { + let bar = Result::Ok(true); + match bar { + Ok(${1:_}) => {$2}, + Err(${3:_}) => {$0}, +} +} +"#, + ); + } + + #[test] + fn postfix_completion_works_for_ambiguous_float_literal() { + check_edit("refm", r#"fn main() { 42.<|> }"#, r#"fn main() { &mut 42 }"#) + } + + #[test] + fn works_in_simple_macro() { + check_edit( + "dbg", + r#" +macro_rules! m { ($e:expr) => { $e } } +fn main() { + let bar: u8 = 12; + m!(bar.d<|>) +} +"#, + r#" +macro_rules! m { ($e:expr) => { $e } } +fn main() { + let bar: u8 = 12; + m!(dbg!(bar)) +} +"#, + ); + } + + #[test] + fn postfix_completion_for_references() { + check_edit("dbg", r#"fn main() { &&42.<|> }"#, r#"fn main() { dbg!(&&42) }"#); + check_edit("refm", r#"fn main() { &&42.<|> }"#, r#"fn main() { &&&mut 42 }"#); + } +} diff --git a/crates/ide/src/completion/complete_qualified_path.rs b/crates/ide/src/completion/complete_qualified_path.rs new file mode 100644 index 000000000..cb7dd23c1 --- /dev/null +++ b/crates/ide/src/completion/complete_qualified_path.rs @@ -0,0 +1,733 @@ +//! Completion of paths, i.e. `some::prefix::<|>`. 
use hir::{Adt, HasVisibility, PathResolution, ScopeDef};
use rustc_hash::FxHashSet;
use syntax::AstNode;
use test_utils::mark;

use crate::completion::{CompletionContext, Completions};

/// Completes the segment after a qualified-path prefix (`some::prefix::<|>`):
/// module members, enum variants, inherent/trait associated items, and
/// associated-type shorthands, filtered by visibility from the current module.
pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) {
    // Only applicable when the caret follows a path qualifier.
    let path = match &ctx.path_prefix {
        Some(path) => path.clone(),
        None => return,
    };

    // Attribute paths (`#[foo::<|>]`) are handled elsewhere.
    if ctx.attribute_under_caret.is_some() {
        return;
    }

    let context_module = ctx.scope.module();

    let resolution = match ctx.scope.resolve_hir_path_qualifier(&path) {
        Some(res) => res,
        None => return,
    };

    // Add associated types on type parameters and `Self`.
    resolution.assoc_type_shorthand_candidates(ctx.db, |alias| {
        acc.add_type_alias(ctx, alias);
        None::<()>
    });

    match resolution {
        // `module::<|>` — offer everything in the module's scope.
        PathResolution::Def(hir::ModuleDef::Module(module)) => {
            let module_scope = module.scope(ctx.db, context_module);
            for (name, def) in module_scope {
                if ctx.use_item_syntax.is_some() {
                    if let ScopeDef::Unknown = def {
                        if let Some(name_ref) = ctx.name_ref_syntax.as_ref() {
                            if name_ref.syntax().text() == name.to_string().as_str() {
                                // for `use self::foo<|>`, don't suggest `foo` as a completion
                                mark::hit!(dont_complete_current_use);
                                continue;
                            }
                        }
                    }
                }

                acc.add_resolution(ctx, name.to_string(), &def);
            }
        }
        // `Type::<|>` / `Alias::<|>` — variants (for enums) plus associated items.
        PathResolution::Def(def @ hir::ModuleDef::Adt(_))
        | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) => {
            if let hir::ModuleDef::Adt(Adt::Enum(e)) = def {
                for variant in e.variants(ctx.db) {
                    acc.add_enum_variant(ctx, variant, None);
                }
            }
            let ty = match def {
                hir::ModuleDef::Adt(adt) => adt.ty(ctx.db),
                hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
                // Guaranteed by the match arm above.
                _ => unreachable!(),
            };

            // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType.
            // (where AssocType is defined on a trait, not an inherent impl)

            let krate = ctx.krate;
            if let Some(krate) = krate {
                let traits_in_scope = ctx.scope.traits_in_scope();
                ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
                    // Skip items not visible from the completion site.
                    if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
                        return None;
                    }
                    match item {
                        hir::AssocItem::Function(func) => {
                            acc.add_function(ctx, func, None);
                        }
                        hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
                        hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
                    }
                    None::<()>
                });

                // Iterate assoc types separately
                ty.iterate_assoc_items(ctx.db, krate, |item| {
                    if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
                        return None;
                    }
                    match item {
                        hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {}
                        hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
                    }
                    None::<()>
                });
            }
        }
        PathResolution::Def(hir::ModuleDef::Trait(t)) => {
            // Handles `Trait::assoc` as well as `::assoc`.
            for item in t.items(ctx.db) {
                if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
                    continue;
                }
                match item {
                    hir::AssocItem::Function(func) => {
                        acc.add_function(ctx, func, None);
                    }
                    hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
                    hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
                }
            }
        }
        // `T::<|>` / `Self::<|>` — candidates from the type's traits in scope.
        PathResolution::TypeParam(_) | PathResolution::SelfType(_) => {
            if let Some(krate) = ctx.krate {
                let ty = match resolution {
                    PathResolution::TypeParam(param) => param.ty(ctx.db),
                    PathResolution::SelfType(impl_def) => impl_def.target_ty(ctx.db),
                    _ => return,
                };

                let traits_in_scope = ctx.scope.traits_in_scope();
                let mut seen = FxHashSet::default();
                ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
                    if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
                        return None;
                    }

                    // We might iterate candidates of a trait multiple times here, so deduplicate
                    // them.
                    if seen.insert(item) {
                        match item {
                            hir::AssocItem::Function(func) => {
                                acc.add_function(ctx, func, None);
                            }
                            hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
                            hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
                        }
                    }
                    None::<()>
                });
            }
        }
        _ => {}
    }
}
+mod foo { pub struct S; } +use self::{foo::*, bar<|>}; +"#, + expect![[r#" + st S + md foo + "#]], + ); + } + + #[test] + fn dont_complete_primitive_in_use() { + check_builtin(r#"use self::<|>;"#, expect![[""]]); + } + + #[test] + fn dont_complete_primitive_in_module_scope() { + check_builtin(r#"fn foo() { self::<|> }"#, expect![[""]]); + } + + #[test] + fn completes_primitives() { + check_builtin( + r#"fn main() { let _: <|> = 92; }"#, + expect![[r#" + bt bool + bt char + bt f32 + bt f64 + bt i128 + bt i16 + bt i32 + bt i64 + bt i8 + bt isize + bt str + bt u128 + bt u16 + bt u32 + bt u64 + bt u8 + bt usize + "#]], + ); + } + + #[test] + fn completes_mod_with_same_name_as_function() { + check( + r#" +use self::my::<|>; + +mod my { pub struct Bar; } +fn my() {} +"#, + expect![[r#" + st Bar + "#]], + ); + } + + #[test] + fn filters_visibility() { + check( + r#" +use self::my::<|>; + +mod my { + struct Bar; + pub struct Foo; + pub use Bar as PublicBar; +} +"#, + expect![[r#" + st Foo + st PublicBar + "#]], + ); + } + + #[test] + fn completes_use_item_starting_with_self() { + check( + r#" +use self::m::<|>; + +mod m { pub struct Bar; } +"#, + expect![[r#" + st Bar + "#]], + ); + } + + #[test] + fn completes_use_item_starting_with_crate() { + check( + r#" +//- /lib.rs +mod foo; +struct Spam; +//- /foo.rs +use crate::Sp<|> +"#, + expect![[r#" + st Spam + md foo + "#]], + ); + } + + #[test] + fn completes_nested_use_tree() { + check( + r#" +//- /lib.rs +mod foo; +struct Spam; +//- /foo.rs +use crate::{Sp<|>}; +"#, + expect![[r#" + st Spam + md foo + "#]], + ); + } + + #[test] + fn completes_deeply_nested_use_tree() { + check( + r#" +//- /lib.rs +mod foo; +pub mod bar { + pub mod baz { + pub struct Spam; + } +} +//- /foo.rs +use crate::{bar::{baz::Sp<|>}}; +"#, + expect![[r#" + st Spam + "#]], + ); + } + + #[test] + fn completes_enum_variant() { + check( + r#" +enum E { Foo, Bar(i32) } +fn foo() { let _ = E::<|> } +"#, + expect![[r#" + ev Bar(…) (i32) + ev Foo () + "#]], 
+ ); + } + + #[test] + fn completes_struct_associated_items() { + check( + r#" +//- /lib.rs +struct S; + +impl S { + fn a() {} + fn b(&self) {} + const C: i32 = 42; + type T = i32; +} + +fn foo() { let _ = S::<|> } +"#, + expect![[r#" + ct C const C: i32 = 42; + ta T type T = i32; + fn a() fn a() + me b() fn b(&self) + "#]], + ); + } + + #[test] + fn associated_item_visibility() { + check( + r#" +struct S; + +mod m { + impl super::S { + pub(super) fn public_method() { } + fn private_method() { } + pub(super) type PublicType = u32; + type PrivateType = u32; + pub(super) const PUBLIC_CONST: u32 = 1; + const PRIVATE_CONST: u32 = 1; + } +} + +fn foo() { let _ = S::<|> } +"#, + expect![[r#" + ct PUBLIC_CONST pub(super) const PUBLIC_CONST: u32 = 1; + ta PublicType pub(super) type PublicType = u32; + fn public_method() pub(super) fn public_method() + "#]], + ); + } + + #[test] + fn completes_enum_associated_method() { + check( + r#" +enum E {}; +impl E { fn m() { } } + +fn foo() { let _ = E::<|> } + "#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_union_associated_method() { + check( + r#" +union U {}; +impl U { fn m() { } } + +fn foo() { let _ = U::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_use_paths_across_crates() { + check( + r#" +//- /main.rs +use foo::<|>; + +//- /foo/lib.rs +pub mod bar { pub struct S; } +"#, + expect![[r#" + md bar + "#]], + ); + } + + #[test] + fn completes_trait_associated_method_1() { + check( + r#" +trait Trait { fn m(); } + +fn foo() { let _ = Trait::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_trait_associated_method_2() { + check( + r#" +trait Trait { fn m(); } + +struct S; +impl Trait for S {} + +fn foo() { let _ = S::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_trait_associated_method_3() { + check( + r#" +trait Trait { fn m(); } + +struct S; +impl Trait for S {} + +fn foo() { let _ = 
::<|> } +"#, + expect![[r#" + fn m() fn m() + "#]], + ); + } + + #[test] + fn completes_ty_param_assoc_ty() { + check( + r#" +trait Super { + type Ty; + const CONST: u8; + fn func() {} + fn method(&self) {} +} + +trait Sub: Super { + type SubTy; + const C2: (); + fn subfunc() {} + fn submethod(&self) {} +} + +fn foo() { T::<|> } +"#, + expect![[r#" + ct C2 const C2: (); + ct CONST const CONST: u8; + ta SubTy type SubTy; + ta Ty type Ty; + fn func() fn func() + me method() fn method(&self) + fn subfunc() fn subfunc() + me submethod() fn submethod(&self) + "#]], + ); + } + + #[test] + fn completes_self_param_assoc_ty() { + check( + r#" +trait Super { + type Ty; + const CONST: u8 = 0; + fn func() {} + fn method(&self) {} +} + +trait Sub: Super { + type SubTy; + const C2: () = (); + fn subfunc() {} + fn submethod(&self) {} +} + +struct Wrap(T); +impl Super for Wrap {} +impl Sub for Wrap { + fn subfunc() { + // Should be able to assume `Self: Sub + Super` + Self::<|> + } +} +"#, + expect![[r#" + ct C2 const C2: () = (); + ct CONST const CONST: u8 = 0; + ta SubTy type SubTy; + ta Ty type Ty; + fn func() fn func() + me method() fn method(&self) + fn subfunc() fn subfunc() + me submethod() fn submethod(&self) + "#]], + ); + } + + #[test] + fn completes_type_alias() { + check( + r#" +struct S; +impl S { fn foo() {} } +type T = S; +impl T { fn bar() {} } + +fn main() { T::<|>; } +"#, + expect![[r#" + fn bar() fn bar() + fn foo() fn foo() + "#]], + ); + } + + #[test] + fn completes_qualified_macros() { + check( + r#" +#[macro_export] +macro_rules! foo { () => {} } + +fn main() { let _ = crate::<|> } + "#, + expect![[r##" + ma foo!(…) #[macro_export] + macro_rules! 
foo + fn main() fn main() + "##]], + ); + } + + #[test] + fn test_super_super_completion() { + check( + r#" +mod a { + const A: usize = 0; + mod b { + const B: usize = 0; + mod c { use super::super::<|> } + } +} +"#, + expect![[r#" + ct A + md b + "#]], + ); + } + + #[test] + fn completes_reexported_items_under_correct_name() { + check( + r#" +fn foo() { self::m::<|> } + +mod m { + pub use super::p::wrong_fn as right_fn; + pub use super::p::WRONG_CONST as RIGHT_CONST; + pub use super::p::WrongType as RightType; +} +mod p { + fn wrong_fn() {} + const WRONG_CONST: u32 = 1; + struct WrongType {}; +} +"#, + expect![[r#" + ct RIGHT_CONST + st RightType + fn right_fn() fn wrong_fn() + "#]], + ); + + check_edit( + "RightType", + r#" +fn foo() { self::m::<|> } + +mod m { + pub use super::p::wrong_fn as right_fn; + pub use super::p::WRONG_CONST as RIGHT_CONST; + pub use super::p::WrongType as RightType; +} +mod p { + fn wrong_fn() {} + const WRONG_CONST: u32 = 1; + struct WrongType {}; +} +"#, + r#" +fn foo() { self::m::RightType } + +mod m { + pub use super::p::wrong_fn as right_fn; + pub use super::p::WRONG_CONST as RIGHT_CONST; + pub use super::p::WrongType as RightType; +} +mod p { + fn wrong_fn() {} + const WRONG_CONST: u32 = 1; + struct WrongType {}; +} +"#, + ); + } + + #[test] + fn completes_in_simple_macro_call() { + check( + r#" +macro_rules! 
m { ($e:expr) => { $e } } +fn main() { m!(self::f<|>); } +fn foo() {} +"#, + expect![[r#" + fn foo() fn foo() + fn main() fn main() + "#]], + ); + } + + #[test] + fn function_mod_share_name() { + check( + r#" +fn foo() { self::m::<|> } + +mod m { + pub mod z {} + pub fn z() {} +} +"#, + expect![[r#" + md z + fn z() pub fn z() + "#]], + ); + } + + #[test] + fn completes_hashmap_new() { + check( + r#" +struct RandomState; +struct HashMap {} + +impl HashMap { + pub fn new() -> HashMap { } +} +fn foo() { + HashMap::<|> +} +"#, + expect![[r#" + fn new() pub fn new() -> HashMap + "#]], + ); + } + + #[test] + fn dont_complete_attr() { + check( + r#" +mod foo { pub struct Foo; } +#[foo::<|>] +fn f() {} +"#, + expect![[""]], + ); + } +} diff --git a/crates/ide/src/completion/complete_record.rs b/crates/ide/src/completion/complete_record.rs new file mode 100644 index 000000000..74b94594d --- /dev/null +++ b/crates/ide/src/completion/complete_record.rs @@ -0,0 +1,226 @@ +//! Complete fields in record literals and patterns. 
+use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { + let missing_fields = match (ctx.record_pat_syntax.as_ref(), ctx.record_lit_syntax.as_ref()) { + (None, None) => return None, + (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"), + (Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat), + (_, Some(record_lit)) => ctx.sema.record_literal_missing_fields(record_lit), + }; + + for (field, ty) in missing_fields { + acc.add_field(ctx, field, &ty) + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Reference); + expect.assert_eq(&actual); + } + + #[test] + fn test_record_pattern_field() { + check( + r#" +struct S { foo: u32 } + +fn process(f: S) { + match f { + S { f<|>: 92 } => (), + } +} +"#, + expect![[r#" + fd foo u32 + "#]], + ); + } + + #[test] + fn test_record_pattern_enum_variant() { + check( + r#" +enum E { S { foo: u32, bar: () } } + +fn process(e: E) { + match e { + E::S { <|> } => (), + } +} +"#, + expect![[r#" + fd bar () + fd foo u32 + "#]], + ); + } + + #[test] + fn test_record_pattern_field_in_simple_macro() { + check( + r" +macro_rules! 
m { ($e:expr) => { $e } } +struct S { foo: u32 } + +fn process(f: S) { + m!(match f { + S { f<|>: 92 } => (), + }) +} +", + expect![[r#" + fd foo u32 + "#]], + ); + } + + #[test] + fn only_missing_fields_are_completed_in_destruct_pats() { + check( + r#" +struct S { + foo1: u32, foo2: u32, + bar: u32, baz: u32, +} + +fn main() { + let s = S { + foo1: 1, foo2: 2, + bar: 3, baz: 4, + }; + if let S { foo1, foo2: a, <|> } = s {} +} +"#, + expect![[r#" + fd bar u32 + fd baz u32 + "#]], + ); + } + + #[test] + fn test_record_literal_field() { + check( + r#" +struct A { the_field: u32 } +fn foo() { + A { the<|> } +} +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn test_record_literal_enum_variant() { + check( + r#" +enum E { A { a: u32 } } +fn foo() { + let _ = E::A { <|> } +} +"#, + expect![[r#" + fd a u32 + "#]], + ); + } + + #[test] + fn test_record_literal_two_structs() { + check( + r#" +struct A { a: u32 } +struct B { b: u32 } + +fn foo() { + let _: A = B { <|> } +} +"#, + expect![[r#" + fd b u32 + "#]], + ); + } + + #[test] + fn test_record_literal_generic_struct() { + check( + r#" +struct A { a: T } + +fn foo() { + let _: A = A { <|> } +} +"#, + expect![[r#" + fd a u32 + "#]], + ); + } + + #[test] + fn test_record_literal_field_in_simple_macro() { + check( + r#" +macro_rules! m { ($e:expr) => { $e } } +struct A { the_field: u32 } +fn foo() { + m!(A { the<|> }) +} +"#, + expect![[r#" + fd the_field u32 + "#]], + ); + } + + #[test] + fn only_missing_fields_are_completed() { + check( + r#" +struct S { + foo1: u32, foo2: u32, + bar: u32, baz: u32, +} + +fn main() { + let foo1 = 1; + let s = S { foo1, foo2: 5, <|> } +} +"#, + expect![[r#" + fd bar u32 + fd baz u32 + "#]], + ); + } + + #[test] + fn completes_functional_update() { + check( + r#" +struct S { foo1: u32, foo2: u32 } + +fn main() { + let foo1 = 1; + let s = S { foo1, <|> .. 
loop {} } +} +"#, + expect![[r#" + fd foo2 u32 + "#]], + ); + } +} diff --git a/crates/ide/src/completion/complete_snippet.rs b/crates/ide/src/completion/complete_snippet.rs new file mode 100644 index 000000000..4368e4eec --- /dev/null +++ b/crates/ide/src/completion/complete_snippet.rs @@ -0,0 +1,116 @@ +//! FIXME: write short doc here + +use crate::completion::{ + completion_config::SnippetCap, completion_item::Builder, CompletionContext, CompletionItem, + CompletionItemKind, CompletionKind, Completions, +}; + +fn snippet(ctx: &CompletionContext, cap: SnippetCap, label: &str, snippet: &str) -> Builder { + CompletionItem::new(CompletionKind::Snippet, ctx.source_range(), label) + .insert_snippet(cap, snippet) + .kind(CompletionItemKind::Snippet) +} + +pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { + return; + } + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return, + }; + + snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); + snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); +} + +pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_new_item { + return; + } + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return, + }; + + snippet( + ctx, + cap, + "tmod (Test module)", + "\ +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn ${1:test_name}() { + $0 + } +}", + ) + .lookup_by("tmod") + .add_to(acc); + + snippet( + ctx, + cap, + "tfn (Test function)", + "\ +#[test] +fn ${1:feature}() { + $0 +}", + ) + .lookup_by("tfn") + .add_to(acc); + + snippet(ctx, cap, "macro_rules", "macro_rules! 
$1 {\n\t($2) => {\n\t\t$0\n\t};\n}").add_to(acc); + snippet(ctx, cap, "pub(crate)", "pub(crate) $0").add_to(acc); +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{test_utils::completion_list, CompletionKind}; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Snippet); + expect.assert_eq(&actual) + } + + #[test] + fn completes_snippets_in_expressions() { + check( + r#"fn foo(x: i32) { <|> }"#, + expect![[r#" + sn pd + sn ppd + "#]], + ); + } + + #[test] + fn should_not_complete_snippets_in_path() { + check(r#"fn foo(x: i32) { ::foo<|> }"#, expect![[""]]); + check(r#"fn foo(x: i32) { ::<|> }"#, expect![[""]]); + } + + #[test] + fn completes_snippets_in_items() { + check( + r#" +#[cfg(test)] +mod tests { + <|> +} +"#, + expect![[r#" + sn macro_rules + sn pub(crate) + sn tfn (Test function) + sn tmod (Test module) + "#]], + ) + } +} diff --git a/crates/ide/src/completion/complete_trait_impl.rs b/crates/ide/src/completion/complete_trait_impl.rs new file mode 100644 index 000000000..478e31262 --- /dev/null +++ b/crates/ide/src/completion/complete_trait_impl.rs @@ -0,0 +1,488 @@ +//! Completion for associated items in a trait implementation. +//! +//! This module adds the completion items related to implementing associated +//! items within a `impl Trait for Struct` block. The current context node +//! must be within either a `FN`, `TYPE_ALIAS`, or `CONST` node +//! and an direct child of an `IMPL`. +//! +//! # Examples +//! +//! Considering the following trait `impl`: +//! +//! ```ignore +//! trait SomeTrait { +//! fn foo(); +//! } +//! +//! impl SomeTrait for () { +//! fn f<|> +//! } +//! ``` +//! +//! may result in the completion of the following method: +//! +//! ```ignore +//! # trait SomeTrait { +//! # fn foo(); +//! # } +//! +//! impl SomeTrait for () { +//! fn foo() {}<|> +//! } +//! 
``` + +use assists::utils::get_missing_assoc_items; +use hir::{self, Docs, HasSource}; +use syntax::{ + ast::{self, edit, Impl}, + AstNode, SyntaxKind, SyntaxNode, TextRange, T, +}; +use text_edit::TextEdit; + +use crate::{ + completion::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, + }, + display::function_declaration, +}; + +pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { + if let Some((trigger, impl_def)) = completion_match(ctx) { + match trigger.kind() { + SyntaxKind::NAME_REF => get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .for_each(|item| match item { + hir::AssocItem::Function(fn_item) => { + add_function_impl(&trigger, acc, ctx, fn_item) + } + hir::AssocItem::TypeAlias(type_item) => { + add_type_alias_impl(&trigger, acc, ctx, type_item) + } + hir::AssocItem::Const(const_item) => { + add_const_impl(&trigger, acc, ctx, const_item) + } + }), + + SyntaxKind::FN => { + for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .filter_map(|item| match item { + hir::AssocItem::Function(fn_item) => Some(fn_item), + _ => None, + }) + { + add_function_impl(&trigger, acc, ctx, missing_fn); + } + } + + SyntaxKind::TYPE_ALIAS => { + for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .filter_map(|item| match item { + hir::AssocItem::TypeAlias(type_item) => Some(type_item), + _ => None, + }) + { + add_type_alias_impl(&trigger, acc, ctx, missing_fn); + } + } + + SyntaxKind::CONST => { + for missing_fn in get_missing_assoc_items(&ctx.sema, &impl_def) + .into_iter() + .filter_map(|item| match item { + hir::AssocItem::Const(const_item) => Some(const_item), + _ => None, + }) + { + add_const_impl(&trigger, acc, ctx, missing_fn); + } + } + + _ => {} + } + } +} + +fn completion_match(ctx: &CompletionContext) -> Option<(SyntaxNode, Impl)> { + let (trigger, impl_def_offset) = ctx.token.ancestors().find_map(|p| match p.kind() { + 
SyntaxKind::FN | SyntaxKind::TYPE_ALIAS | SyntaxKind::CONST | SyntaxKind::BLOCK_EXPR => { + Some((p, 2)) + } + SyntaxKind::NAME_REF => Some((p, 5)), + _ => None, + })?; + let impl_def = (0..impl_def_offset - 1) + .try_fold(trigger.parent()?, |t, _| t.parent()) + .and_then(ast::Impl::cast)?; + Some((trigger, impl_def)) +} + +fn add_function_impl( + fn_def_node: &SyntaxNode, + acc: &mut Completions, + ctx: &CompletionContext, + func: hir::Function, +) { + let fn_name = func.name(ctx.db).to_string(); + + let label = if !func.params(ctx.db).is_empty() { + format!("fn {}(..)", fn_name) + } else { + format!("fn {}()", fn_name) + }; + + let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) + .lookup_by(fn_name) + .set_documentation(func.docs(ctx.db)); + + let completion_kind = if func.has_self_param(ctx.db) { + CompletionItemKind::Method + } else { + CompletionItemKind::Function + }; + let range = TextRange::new(fn_def_node.text_range().start(), ctx.source_range().end()); + + let function_decl = function_declaration(&func.source(ctx.db).value); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = format!("{} {{\n $0\n}}", function_decl); + builder.snippet_edit(cap, TextEdit::replace(range, snippet)) + } + None => { + let header = format!("{} {{", function_decl); + builder.text_edit(TextEdit::replace(range, header)) + } + } + .kind(completion_kind) + .add_to(acc); +} + +fn add_type_alias_impl( + type_def_node: &SyntaxNode, + acc: &mut Completions, + ctx: &CompletionContext, + type_alias: hir::TypeAlias, +) { + let alias_name = type_alias.name(ctx.db).to_string(); + + let snippet = format!("type {} = ", alias_name); + + let range = TextRange::new(type_def_node.text_range().start(), ctx.source_range().end()); + + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) + .text_edit(TextEdit::replace(range, snippet)) + .lookup_by(alias_name) + .kind(CompletionItemKind::TypeAlias) + 
.set_documentation(type_alias.docs(ctx.db)) + .add_to(acc); +} + +fn add_const_impl( + const_def_node: &SyntaxNode, + acc: &mut Completions, + ctx: &CompletionContext, + const_: hir::Const, +) { + let const_name = const_.name(ctx.db).map(|n| n.to_string()); + + if let Some(const_name) = const_name { + let snippet = make_const_compl_syntax(&const_.source(ctx.db).value); + + let range = TextRange::new(const_def_node.text_range().start(), ctx.source_range().end()); + + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone()) + .text_edit(TextEdit::replace(range, snippet)) + .lookup_by(const_name) + .kind(CompletionItemKind::Const) + .set_documentation(const_.docs(ctx.db)) + .add_to(acc); + } +} + +fn make_const_compl_syntax(const_: &ast::Const) -> String { + let const_ = edit::remove_attrs_and_docs(const_); + + let const_start = const_.syntax().text_range().start(); + let const_end = const_.syntax().text_range().end(); + + let start = + const_.syntax().first_child_or_token().map_or(const_start, |f| f.text_range().start()); + + let end = const_ + .syntax() + .children_with_tokens() + .find(|s| s.kind() == T![;] || s.kind() == T![=]) + .map_or(const_end, |f| f.text_range().start()); + + let len = end - start; + let range = TextRange::new(0.into(), len); + + let syntax = const_.syntax().text().slice(range).to_string(); + + format!("{} = ", syntax.trim_end()) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Magic); + expect.assert_eq(&actual) + } + + #[test] + fn name_ref_function_type_const() { + check( + r#" +trait Test { + type TestType; + const TEST_CONST: u16; + fn test(); +} +struct T; + +impl Test for T { + t<|> +} +"#, + expect![[" +ct const TEST_CONST: u16 = \n\ +fn fn test() +ta type TestType = \n\ + "]], + ); + } + + 
#[test] + fn no_nested_fn_completions() { + check( + r" +trait Test { + fn test(); + fn test2(); +} +struct T; + +impl Test for T { + fn test() { + t<|> + } +} +", + expect![[""]], + ); + } + + #[test] + fn name_ref_single_function() { + check_edit( + "test", + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + t<|> +} +"#, + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + fn test() { + $0 +} +} +"#, + ); + } + + #[test] + fn single_function() { + check_edit( + "test", + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + fn t<|> +} +"#, + r#" +trait Test { + fn test(); +} +struct T; + +impl Test for T { + fn test() { + $0 +} +} +"#, + ); + } + + #[test] + fn hide_implemented_fn() { + check( + r#" +trait Test { + fn foo(); + fn foo_bar(); +} +struct T; + +impl Test for T { + fn foo() {} + fn f<|> +} +"#, + expect![[r#" + fn fn foo_bar() + "#]], + ); + } + + #[test] + fn generic_fn() { + check_edit( + "foo", + r#" +trait Test { + fn foo(); +} +struct T; + +impl Test for T { + fn f<|> +} +"#, + r#" +trait Test { + fn foo(); +} +struct T; + +impl Test for T { + fn foo() { + $0 +} +} +"#, + ); + check_edit( + "foo", + r#" +trait Test { + fn foo() where T: Into; +} +struct T; + +impl Test for T { + fn f<|> +} +"#, + r#" +trait Test { + fn foo() where T: Into; +} +struct T; + +impl Test for T { + fn foo() +where T: Into { + $0 +} +} +"#, + ); + } + + #[test] + fn associated_type() { + check_edit( + "SomeType", + r#" +trait Test { + type SomeType; +} + +impl Test for () { + type S<|> +} +"#, + " +trait Test { + type SomeType; +} + +impl Test for () { + type SomeType = \n\ +} +", + ); + } + + #[test] + fn associated_const() { + check_edit( + "SOME_CONST", + r#" +trait Test { + const SOME_CONST: u16; +} + +impl Test for () { + const S<|> +} +"#, + " +trait Test { + const SOME_CONST: u16; +} + +impl Test for () { + const SOME_CONST: u16 = \n\ +} +", + ); + + check_edit( + "SOME_CONST", + r#" +trait Test { + const 
SOME_CONST: u16 = 92; +} + +impl Test for () { + const S<|> +} +"#, + " +trait Test { + const SOME_CONST: u16 = 92; +} + +impl Test for () { + const SOME_CONST: u16 = \n\ +} +", + ); + } +} diff --git a/crates/ide/src/completion/complete_unqualified_path.rs b/crates/ide/src/completion/complete_unqualified_path.rs new file mode 100644 index 000000000..824227f31 --- /dev/null +++ b/crates/ide/src/completion/complete_unqualified_path.rs @@ -0,0 +1,658 @@ +//! Completion of names from the current scope, e.g. locals and imported items. + +use hir::{Adt, ModuleDef, ScopeDef, Type}; +use syntax::AstNode; +use test_utils::mark; + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) { + if !(ctx.is_trivial_path || ctx.is_pat_binding_or_const) { + return; + } + if ctx.record_lit_syntax.is_some() + || ctx.record_pat_syntax.is_some() + || ctx.attribute_under_caret.is_some() + { + return; + } + + if let Some(ty) = &ctx.expected_type { + complete_enum_variants(acc, ctx, ty); + } + + if ctx.is_pat_binding_or_const { + return; + } + + ctx.scope.process_all_names(&mut |name, res| { + if ctx.use_item_syntax.is_some() { + if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) { + if name_ref.syntax().text() == name.to_string().as_str() { + mark::hit!(self_fulfilling_completion); + return; + } + } + } + acc.add_resolution(ctx, name.to_string(), &res) + }); +} + +fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { + if let Some(Adt::Enum(enum_data)) = ty.as_adt() { + let variants = enum_data.variants(ctx.db); + + let module = if let Some(module) = ctx.scope.module() { + // Compute path from the completion site if available. + module + } else { + // Otherwise fall back to the enum's definition site. 
+ enum_data.module(ctx.db) + }; + + for variant in variants { + if let Some(path) = module.find_use_path(ctx.db, ModuleDef::from(variant)) { + // Variants with trivial paths are already added by the existing completion logic, + // so we should avoid adding these twice + if path.segments.len() > 1 { + acc.add_qualified_enum_variant(ctx, variant, path); + } + } + } + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::completion::{ + test_utils::{check_edit, completion_list}, + CompletionKind, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = completion_list(ra_fixture, CompletionKind::Reference); + expect.assert_eq(&actual) + } + + #[test] + fn self_fulfilling_completion() { + mark::check!(self_fulfilling_completion); + check( + r#" +use foo<|> +use std::collections; +"#, + expect![[r#" + ?? collections + "#]], + ); + } + + #[test] + fn bind_pat_and_path_ignore_at() { + check( + r#" +enum Enum { A, B } +fn quux(x: Option) { + match x { + None => (), + Some(en<|> @ Enum::A) => (), + } +} +"#, + expect![[""]], + ); + } + + #[test] + fn bind_pat_and_path_ignore_ref() { + check( + r#" +enum Enum { A, B } +fn quux(x: Option) { + match x { + None => (), + Some(ref en<|>) => (), + } +} +"#, + expect![[""]], + ); + } + + #[test] + fn bind_pat_and_path() { + check( + r#" +enum Enum { A, B } +fn quux(x: Option) { + match x { + None => (), + Some(En<|>) => (), + } +} +"#, + expect![[r#" + en Enum + "#]], + ); + } + + #[test] + fn completes_bindings_from_let() { + check( + r#" +fn quux(x: i32) { + let y = 92; + 1 + <|>; + let z = (); +} +"#, + expect![[r#" + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_bindings_from_if_let() { + check( + r#" +fn quux() { + if let Some(x) = foo() { + let y = 92; + }; + if let Some(a) = bar() { + let b = 62; + 1 + <|> + } +} +"#, + expect![[r#" + bn a + bn b i32 + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn 
completes_bindings_from_for() { + check( + r#" +fn quux() { + for x in &[1, 2, 3] { <|> } +} +"#, + expect![[r#" + fn quux() fn quux() + bn x + "#]], + ); + } + + #[test] + fn completes_if_prefix_is_keyword() { + mark::check!(completes_if_prefix_is_keyword); + check_edit( + "wherewolf", + r#" +fn main() { + let wherewolf = 92; + drop(where<|>) +} +"#, + r#" +fn main() { + let wherewolf = 92; + drop(wherewolf) +} +"#, + ) + } + + #[test] + fn completes_generic_params() { + check( + r#"fn quux() { <|> }"#, + expect![[r#" + tp T + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn completes_generic_params_in_struct() { + check( + r#"struct S { x: <|>}"#, + expect![[r#" + st S<…> + tp Self + tp T + "#]], + ); + } + + #[test] + fn completes_self_in_enum() { + check( + r#"enum X { Y(<|>) }"#, + expect![[r#" + tp Self + en X + "#]], + ); + } + + #[test] + fn completes_module_items() { + check( + r#" +struct S; +enum E {} +fn quux() { <|> } +"#, + expect![[r#" + en E + st S + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn completes_extern_prelude() { + check( + r#" +//- /lib.rs +use <|>; + +//- /other_crate/lib.rs +// nothing here +"#, + expect![[r#" + md other_crate + "#]], + ); + } + + #[test] + fn completes_module_items_in_nested_modules() { + check( + r#" +struct Foo; +mod m { + struct Bar; + fn quux() { <|> } +} +"#, + expect![[r#" + st Bar + fn quux() fn quux() + "#]], + ); + } + + #[test] + fn completes_return_type() { + check( + r#" +struct Foo; +fn x() -> <|> +"#, + expect![[r#" + st Foo + fn x() fn x() + "#]], + ); + } + + #[test] + fn dont_show_both_completions_for_shadowing() { + check( + r#" +fn foo() { + let bar = 92; + { + let bar = 62; + drop(<|>) + } +} +"#, + // FIXME: should be only one bar here + expect![[r#" + bn bar i32 + bn bar i32 + fn foo() fn foo() + "#]], + ); + } + + #[test] + fn completes_self_in_methods() { + check( + r#"impl S { fn foo(&self) { <|> } }"#, + expect![[r#" + tp Self + bn self &{unknown} + "#]], + ); + } + + #[test] + 
fn completes_prelude() { + check( + r#" +//- /main.rs +fn foo() { let x: <|> } + +//- /std/lib.rs +#[prelude_import] +use prelude::*; + +mod prelude { struct Option; } +"#, + expect![[r#" + st Option + fn foo() fn foo() + md std + "#]], + ); + } + + #[test] + fn completes_std_prelude_if_core_is_defined() { + check( + r#" +//- /main.rs +fn foo() { let x: <|> } + +//- /core/lib.rs +#[prelude_import] +use prelude::*; + +mod prelude { struct Option; } + +//- /std/lib.rs +#[prelude_import] +use prelude::*; + +mod prelude { struct String; } +"#, + expect![[r#" + st String + md core + fn foo() fn foo() + md std + "#]], + ); + } + + #[test] + fn completes_macros_as_value() { + check( + r#" +macro_rules! foo { () => {} } + +#[macro_use] +mod m1 { + macro_rules! bar { () => {} } +} + +mod m2 { + macro_rules! nope { () => {} } + + #[macro_export] + macro_rules! baz { () => {} } +} + +fn main() { let v = <|> } +"#, + expect![[r##" + ma bar!(…) macro_rules! bar + ma baz!(…) #[macro_export] + macro_rules! baz + ma foo!(…) macro_rules! foo + md m1 + md m2 + fn main() fn main() + "##]], + ); + } + + #[test] + fn completes_both_macro_and_value() { + check( + r#" +macro_rules! foo { () => {} } +fn foo() { <|> } +"#, + expect![[r#" + ma foo!(…) macro_rules! foo + fn foo() fn foo() + "#]], + ); + } + + #[test] + fn completes_macros_as_type() { + check( + r#" +macro_rules! foo { () => {} } +fn main() { let x: <|> } +"#, + expect![[r#" + ma foo!(…) macro_rules! foo + fn main() fn main() + "#]], + ); + } + + #[test] + fn completes_macros_as_stmt() { + check( + r#" +macro_rules! foo { () => {} } +fn main() { <|> } +"#, + expect![[r#" + ma foo!(…) macro_rules! foo + fn main() fn main() + "#]], + ); + } + + #[test] + fn completes_local_item() { + check( + r#" +fn main() { + return f<|>; + fn frobnicate() {} +} +"#, + expect![[r#" + fn frobnicate() fn frobnicate() + fn main() fn main() + "#]], + ); + } + + #[test] + fn completes_in_simple_macro_1() { + check( + r#" +macro_rules! 
m { ($e:expr) => { $e } } +fn quux(x: i32) { + let y = 92; + m!(<|>); +} +"#, + expect![[r#" + ma m!(…) macro_rules! m + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_in_simple_macro_2() { + check( + r" +macro_rules! m { ($e:expr) => { $e } } +fn quux(x: i32) { + let y = 92; + m!(x<|>); +} +", + expect![[r#" + ma m!(…) macro_rules! m + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_in_simple_macro_without_closing_parens() { + check( + r#" +macro_rules! m { ($e:expr) => { $e } } +fn quux(x: i32) { + let y = 92; + m!(x<|> +} +"#, + expect![[r#" + ma m!(…) macro_rules! m + fn quux(…) fn quux(x: i32) + bn x i32 + bn y i32 + "#]], + ); + } + + #[test] + fn completes_unresolved_uses() { + check( + r#" +use spam::Quux; + +fn main() { <|> } +"#, + expect![[r#" + ?? Quux + fn main() fn main() + "#]], + ); + } + #[test] + fn completes_enum_variant_matcharm() { + check( + r#" +enum Foo { Bar, Baz, Quux } + +fn main() { + let foo = Foo::Quux; + match foo { Qu<|> } +} +"#, + expect![[r#" + en Foo + ev Foo::Bar () + ev Foo::Baz () + ev Foo::Quux () + "#]], + ) + } + + #[test] + fn completes_enum_variant_iflet() { + check( + r#" +enum Foo { Bar, Baz, Quux } + +fn main() { + let foo = Foo::Quux; + if let Qu<|> = foo { } +} +"#, + expect![[r#" + en Foo + ev Foo::Bar () + ev Foo::Baz () + ev Foo::Quux () + "#]], + ) + } + + #[test] + fn completes_enum_variant_basic_expr() { + check( + r#" +enum Foo { Bar, Baz, Quux } +fn main() { let foo: Foo = Q<|> } +"#, + expect![[r#" + en Foo + ev Foo::Bar () + ev Foo::Baz () + ev Foo::Quux () + fn main() fn main() + "#]], + ) + } + + #[test] + fn completes_enum_variant_from_module() { + check( + r#" +mod m { pub enum E { V } } +fn f() -> m::E { V<|> } +"#, + expect![[r#" + fn f() fn f() -> m::E + md m + ev m::E::V () + "#]], + ) + } + + #[test] + fn dont_complete_attr() { + check( + r#" +struct Foo; +#[<|>] +fn f() {} +"#, + expect![[""]], + ) + } + + 
#[test] + fn completes_type_or_trait_in_impl_block() { + check( + r#" +trait MyTrait {} +struct MyStruct {} + +impl My<|> +"#, + expect![[r#" + st MyStruct + tt MyTrait + tp Self + "#]], + ) + } +} diff --git a/crates/ide/src/completion/completion_config.rs b/crates/ide/src/completion/completion_config.rs new file mode 100644 index 000000000..71b49ace8 --- /dev/null +++ b/crates/ide/src/completion/completion_config.rs @@ -0,0 +1,35 @@ +//! Settings for tweaking completion. +//! +//! The fun thing here is `SnippetCap` -- this type can only be created in this +//! module, and we use to statically check that we only produce snippet +//! completions if we are allowed to. + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct CompletionConfig { + pub enable_postfix_completions: bool, + pub add_call_parenthesis: bool, + pub add_call_argument_snippets: bool, + pub snippet_cap: Option, +} + +impl CompletionConfig { + pub fn allow_snippets(&mut self, yes: bool) { + self.snippet_cap = if yes { Some(SnippetCap { _private: () }) } else { None } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct SnippetCap { + _private: (), +} + +impl Default for CompletionConfig { + fn default() -> Self { + CompletionConfig { + enable_postfix_completions: true, + add_call_parenthesis: true, + add_call_argument_snippets: true, + snippet_cap: Some(SnippetCap { _private: () }), + } + } +} diff --git a/crates/ide/src/completion/completion_context.rs b/crates/ide/src/completion/completion_context.rs new file mode 100644 index 000000000..047ecd9d7 --- /dev/null +++ b/crates/ide/src/completion/completion_context.rs @@ -0,0 +1,465 @@ +//! 
FIXME: write short doc here + +use base_db::SourceDatabase; +use hir::{Semantics, SemanticsScope, Type}; +use ide_db::RootDatabase; +use syntax::{ + algo::{find_covering_element, find_node_at_offset}, + ast, match_ast, AstNode, NodeOrToken, + SyntaxKind::*, + SyntaxNode, SyntaxToken, TextRange, TextSize, +}; +use text_edit::Indel; + +use super::patterns::{ + has_bind_pat_parent, has_block_expr_parent, has_impl_as_prev_sibling, has_impl_parent, + has_item_list_or_source_file_parent, has_ref_parent, has_trait_as_prev_sibling, + has_trait_parent, if_is_prev, is_in_loop_body, is_match_arm, unsafe_is_prev, +}; +use crate::{call_info::ActiveParameter, completion::CompletionConfig, FilePosition}; +use test_utils::mark; + +/// `CompletionContext` is created early during completion to figure out, where +/// exactly is the cursor, syntax-wise. +#[derive(Debug)] +pub(crate) struct CompletionContext<'a> { + pub(super) sema: Semantics<'a, RootDatabase>, + pub(super) scope: SemanticsScope<'a>, + pub(super) db: &'a RootDatabase, + pub(super) config: &'a CompletionConfig, + pub(super) position: FilePosition, + /// The token before the cursor, in the original file. + pub(super) original_token: SyntaxToken, + /// The token before the cursor, in the macro-expanded file. + pub(super) token: SyntaxToken, + pub(super) krate: Option, + pub(super) expected_type: Option, + pub(super) name_ref_syntax: Option, + pub(super) function_syntax: Option, + pub(super) use_item_syntax: Option, + pub(super) record_lit_syntax: Option, + pub(super) record_pat_syntax: Option, + pub(super) record_field_syntax: Option, + pub(super) impl_def: Option, + /// FIXME: `ActiveParameter` is string-based, which is very very wrong + pub(super) active_parameter: Option, + pub(super) is_param: bool, + /// If a name-binding or reference to a const in a pattern. + /// Irrefutable patterns (like let) are excluded. + pub(super) is_pat_binding_or_const: bool, + /// A single-indent path, like `foo`. 
`::foo` should not be considered a trivial path. + pub(super) is_trivial_path: bool, + /// If not a trivial path, the prefix (qualifier). + pub(super) path_prefix: Option, + pub(super) after_if: bool, + /// `true` if we are a statement or a last expr in the block. + pub(super) can_be_stmt: bool, + /// `true` if we expect an expression at the cursor position. + pub(super) is_expr: bool, + /// Something is typed at the "top" level, in module or impl/trait. + pub(super) is_new_item: bool, + /// The receiver if this is a field or method access, i.e. writing something.<|> + pub(super) dot_receiver: Option, + pub(super) dot_receiver_is_ambiguous_float_literal: bool, + /// If this is a call (method or function) in particular, i.e. the () are already there. + pub(super) is_call: bool, + /// Like `is_call`, but for tuple patterns. + pub(super) is_pattern_call: bool, + /// If this is a macro call, i.e. the () are already there. + pub(super) is_macro_call: bool, + pub(super) is_path_type: bool, + pub(super) has_type_args: bool, + pub(super) attribute_under_caret: Option, + pub(super) unsafe_is_prev: bool, + pub(super) if_is_prev: bool, + pub(super) block_expr_parent: bool, + pub(super) bind_pat_parent: bool, + pub(super) ref_pat_parent: bool, + pub(super) in_loop_body: bool, + pub(super) has_trait_parent: bool, + pub(super) has_impl_parent: bool, + pub(super) trait_as_prev_sibling: bool, + pub(super) impl_as_prev_sibling: bool, + pub(super) is_match_arm: bool, + pub(super) has_item_list_or_source_file_parent: bool, +} + +impl<'a> CompletionContext<'a> { + pub(super) fn new( + db: &'a RootDatabase, + position: FilePosition, + config: &'a CompletionConfig, + ) -> Option> { + let sema = Semantics::new(db); + + let original_file = sema.parse(position.file_id); + + // Insert a fake ident to get a valid parse tree. We will use this file + // to determine context, though the original_file will be used for + // actual completion. 
+ let file_with_fake_ident = { + let parse = db.parse(position.file_id); + let edit = Indel::insert(position.offset, "intellijRulezz".to_string()); + parse.reparse(&edit).tree() + }; + let fake_ident_token = + file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap(); + + let krate = sema.to_module_def(position.file_id).map(|m| m.krate()); + let original_token = + original_file.syntax().token_at_offset(position.offset).left_biased()?; + let token = sema.descend_into_macros(original_token.clone()); + let scope = sema.scope_at_offset(&token.parent(), position.offset); + let mut ctx = CompletionContext { + sema, + scope, + db, + config, + original_token, + token, + position, + krate, + expected_type: None, + name_ref_syntax: None, + function_syntax: None, + use_item_syntax: None, + record_lit_syntax: None, + record_pat_syntax: None, + record_field_syntax: None, + impl_def: None, + active_parameter: ActiveParameter::at(db, position), + is_param: false, + is_pat_binding_or_const: false, + is_trivial_path: false, + path_prefix: None, + after_if: false, + can_be_stmt: false, + is_expr: false, + is_new_item: false, + dot_receiver: None, + is_call: false, + is_pattern_call: false, + is_macro_call: false, + is_path_type: false, + has_type_args: false, + dot_receiver_is_ambiguous_float_literal: false, + attribute_under_caret: None, + unsafe_is_prev: false, + in_loop_body: false, + ref_pat_parent: false, + bind_pat_parent: false, + block_expr_parent: false, + has_trait_parent: false, + has_impl_parent: false, + trait_as_prev_sibling: false, + impl_as_prev_sibling: false, + if_is_prev: false, + is_match_arm: false, + has_item_list_or_source_file_parent: false, + }; + + let mut original_file = original_file.syntax().clone(); + let mut hypothetical_file = file_with_fake_ident.syntax().clone(); + let mut offset = position.offset; + let mut fake_ident_token = fake_ident_token; + + // Are we inside a macro call? 
+ while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( + find_node_at_offset::(&original_file, offset), + find_node_at_offset::(&hypothetical_file, offset), + ) { + if actual_macro_call.path().as_ref().map(|s| s.syntax().text()) + != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()) + { + break; + } + let hypothetical_args = match macro_call_with_fake_ident.token_tree() { + Some(tt) => tt, + None => break, + }; + if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( + ctx.sema.expand(&actual_macro_call), + ctx.sema.expand_hypothetical( + &actual_macro_call, + &hypothetical_args, + fake_ident_token, + ), + ) { + let new_offset = hypothetical_expansion.1.text_range().start(); + if new_offset > actual_expansion.text_range().end() { + break; + } + original_file = actual_expansion; + hypothetical_file = hypothetical_expansion.0; + fake_ident_token = hypothetical_expansion.1; + offset = new_offset; + } else { + break; + } + } + ctx.fill_keyword_patterns(&hypothetical_file, offset); + ctx.fill(&original_file, hypothetical_file, offset); + Some(ctx) + } + + // The range of the identifier that is being completed. 
+ pub(crate) fn source_range(&self) -> TextRange { + // check kind of macro-expanded token, but use range of original token + if self.token.kind() == IDENT || self.token.kind().is_keyword() { + mark::hit!(completes_if_prefix_is_keyword); + self.original_token.text_range() + } else { + TextRange::empty(self.position.offset) + } + } + + fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) { + let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); + let syntax_element = NodeOrToken::Token(fake_ident_token); + self.block_expr_parent = has_block_expr_parent(syntax_element.clone()); + self.unsafe_is_prev = unsafe_is_prev(syntax_element.clone()); + self.if_is_prev = if_is_prev(syntax_element.clone()); + self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone()); + self.ref_pat_parent = has_ref_parent(syntax_element.clone()); + self.in_loop_body = is_in_loop_body(syntax_element.clone()); + self.has_trait_parent = has_trait_parent(syntax_element.clone()); + self.has_impl_parent = has_impl_parent(syntax_element.clone()); + self.impl_as_prev_sibling = has_impl_as_prev_sibling(syntax_element.clone()); + self.trait_as_prev_sibling = has_trait_as_prev_sibling(syntax_element.clone()); + self.is_match_arm = is_match_arm(syntax_element.clone()); + self.has_item_list_or_source_file_parent = + has_item_list_or_source_file_parent(syntax_element); + } + + fn fill( + &mut self, + original_file: &SyntaxNode, + file_with_fake_ident: SyntaxNode, + offset: TextSize, + ) { + // FIXME: this is wrong in at least two cases: + // * when there's no token `foo(<|>)` + // * when there is a token, but it happens to have type of it's own + self.expected_type = self + .token + .ancestors() + .find_map(|node| { + let ty = match_ast! 
{ + match node { + ast::Pat(it) => self.sema.type_of_pat(&it), + ast::Expr(it) => self.sema.type_of_expr(&it), + _ => return None, + } + }; + Some(ty) + }) + .flatten(); + self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); + + // First, let's try to complete a reference to some declaration. + if let Some(name_ref) = find_node_at_offset::(&file_with_fake_ident, offset) { + // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. + // See RFC#1685. + if is_node::(name_ref.syntax()) { + self.is_param = true; + return; + } + // FIXME: remove this (V) duplication and make the check more precise + if name_ref.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { + self.record_pat_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); + } + self.classify_name_ref(original_file, name_ref, offset); + } + + // Otherwise, see if this is a declaration. We can use heuristics to + // suggest declaration names, see `CompletionKind::Magic`. 
+ if let Some(name) = find_node_at_offset::(&file_with_fake_ident, offset) { + if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) { + self.is_pat_binding_or_const = true; + if bind_pat.at_token().is_some() + || bind_pat.ref_token().is_some() + || bind_pat.mut_token().is_some() + { + self.is_pat_binding_or_const = false; + } + if bind_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast).is_some() { + self.is_pat_binding_or_const = false; + } + if let Some(let_stmt) = bind_pat.syntax().ancestors().find_map(ast::LetStmt::cast) { + if let Some(pat) = let_stmt.pat() { + if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) + { + self.is_pat_binding_or_const = false; + } + } + } + } + if is_node::(name.syntax()) { + self.is_param = true; + return; + } + // FIXME: remove this (^) duplication and make the check more precise + if name.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { + self.record_pat_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); + } + } + } + + fn classify_name_ref( + &mut self, + original_file: &SyntaxNode, + name_ref: ast::NameRef, + offset: TextSize, + ) { + self.name_ref_syntax = + find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); + let name_range = name_ref.syntax().text_range(); + if ast::RecordExprField::for_field_name(&name_ref).is_some() { + self.record_lit_syntax = + self.sema.find_node_at_offset_with_macros(&original_file, offset); + } + + self.impl_def = self + .sema + .ancestors_with_macros(self.token.parent()) + .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) + .find_map(ast::Impl::cast); + + let top_node = name_ref + .syntax() + .ancestors() + .take_while(|it| it.text_range() == name_range) + .last() + .unwrap(); + + match top_node.parent().map(|it| it.kind()) { + Some(SOURCE_FILE) | Some(ITEM_LIST) => { + self.is_new_item = true; + return; + } + _ => (), + } + + self.use_item_syntax 
= + self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::Use::cast); + + self.function_syntax = self + .sema + .ancestors_with_macros(self.token.parent()) + .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) + .find_map(ast::Fn::cast); + + self.record_field_syntax = self + .sema + .ancestors_with_macros(self.token.parent()) + .take_while(|it| { + it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR + }) + .find_map(ast::RecordExprField::cast); + + let parent = match name_ref.syntax().parent() { + Some(it) => it, + None => return, + }; + + if let Some(segment) = ast::PathSegment::cast(parent.clone()) { + let path = segment.parent_path(); + self.is_call = path + .syntax() + .parent() + .and_then(ast::PathExpr::cast) + .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) + .is_some(); + self.is_macro_call = path.syntax().parent().and_then(ast::MacroCall::cast).is_some(); + self.is_pattern_call = + path.syntax().parent().and_then(ast::TupleStructPat::cast).is_some(); + + self.is_path_type = path.syntax().parent().and_then(ast::PathType::cast).is_some(); + self.has_type_args = segment.generic_arg_list().is_some(); + + let hygiene = hir::Hygiene::new(self.db, self.position.file_id.into()); + if let Some(path) = hir::Path::from_src(path.clone(), &hygiene) { + if let Some(path_prefix) = path.qualifier() { + self.path_prefix = Some(path_prefix); + return; + } + } + + if path.qualifier().is_none() { + self.is_trivial_path = true; + + // Find either enclosing expr statement (thing with `;`) or a + // block. If block, check that we are the last expr. 
+ self.can_be_stmt = name_ref + .syntax() + .ancestors() + .find_map(|node| { + if let Some(stmt) = ast::ExprStmt::cast(node.clone()) { + return Some( + stmt.syntax().text_range() == name_ref.syntax().text_range(), + ); + } + if let Some(block) = ast::BlockExpr::cast(node) { + return Some( + block.expr().map(|e| e.syntax().text_range()) + == Some(name_ref.syntax().text_range()), + ); + } + None + }) + .unwrap_or(false); + self.is_expr = path.syntax().parent().and_then(ast::PathExpr::cast).is_some(); + + if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { + if let Some(if_expr) = + self.sema.find_node_at_offset_with_macros::(original_file, off) + { + if if_expr.syntax().text_range().end() + < name_ref.syntax().text_range().start() + { + self.after_if = true; + } + } + } + } + } + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { + // The receiver comes before the point of insertion of the fake + // ident, so it should have the same range in the non-modified file + self.dot_receiver = field_expr + .expr() + .map(|e| e.syntax().text_range()) + .and_then(|r| find_node_with_range(original_file, r)); + self.dot_receiver_is_ambiguous_float_literal = + if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { + match l.kind() { + ast::LiteralKind::FloatNumber { .. 
} => l.token().text().ends_with('.'), + _ => false, + } + } else { + false + } + } + if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { + // As above + self.dot_receiver = method_call_expr + .expr() + .map(|e| e.syntax().text_range()) + .and_then(|r| find_node_with_range(original_file, r)); + self.is_call = true; + } + } +} + +fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option { + find_covering_element(syntax, range).ancestors().find_map(N::cast) +} + +fn is_node(node: &SyntaxNode) -> bool { + match node.ancestors().find_map(N::cast) { + None => false, + Some(n) => n.syntax().text_range() == node.text_range(), + } +} diff --git a/crates/ide/src/completion/completion_item.rs b/crates/ide/src/completion/completion_item.rs new file mode 100644 index 000000000..9377cdc57 --- /dev/null +++ b/crates/ide/src/completion/completion_item.rs @@ -0,0 +1,384 @@ +//! FIXME: write short doc here + +use std::fmt; + +use hir::Documentation; +use syntax::TextRange; +use text_edit::TextEdit; + +use crate::completion::completion_config::SnippetCap; + +/// `CompletionItem` describes a single completion variant in the editor pop-up. +/// It is basically a POD with various properties. To construct a +/// `CompletionItem`, use `new` method and the `Builder` struct. +pub struct CompletionItem { + /// Used only internally in tests, to check only specific kind of + /// completion (postfix, keyword, reference, etc). + #[allow(unused)] + pub(crate) completion_kind: CompletionKind, + /// Label in the completion pop up which identifies completion. + label: String, + /// Range of identifier that is being completed. + /// + /// It should be used primarily for UI, but we also use this to convert + /// genetic TextEdit into LSP's completion edit (see conv.rs). + /// + /// `source_range` must contain the completion offset. `insert_text` should + /// start with what `source_range` points to, or VSCode will filter out the + /// completion silently. 
+ source_range: TextRange, + /// What happens when user selects this item. + /// + /// Typically, replaces `source_range` with new identifier. + text_edit: TextEdit, + insert_text_format: InsertTextFormat, + + /// What item (struct, function, etc) are we completing. + kind: Option, + + /// Lookup is used to check if completion item indeed can complete current + /// ident. + /// + /// That is, in `foo.bar<|>` lookup of `abracadabra` will be accepted (it + /// contains `bar` sub sequence), and `quux` will rejected. + lookup: Option, + + /// Additional info to show in the UI pop up. + detail: Option, + documentation: Option, + + /// Whether this item is marked as deprecated + deprecated: bool, + + /// If completing a function call, ask the editor to show parameter popup + /// after completion. + trigger_call_info: bool, + + /// Score is useful to pre select or display in better order completion items + score: Option, +} + +// We use custom debug for CompletionItem to make snapshot tests more readable. 
+impl fmt::Debug for CompletionItem { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut s = f.debug_struct("CompletionItem"); + s.field("label", &self.label()).field("source_range", &self.source_range()); + if self.text_edit().len() == 1 { + let atom = &self.text_edit().iter().next().unwrap(); + s.field("delete", &atom.delete); + s.field("insert", &atom.insert); + } else { + s.field("text_edit", &self.text_edit); + } + if let Some(kind) = self.kind().as_ref() { + s.field("kind", kind); + } + if self.lookup() != self.label() { + s.field("lookup", &self.lookup()); + } + if let Some(detail) = self.detail() { + s.field("detail", &detail); + } + if let Some(documentation) = self.documentation() { + s.field("documentation", &documentation); + } + if self.deprecated { + s.field("deprecated", &true); + } + if let Some(score) = &self.score { + s.field("score", score); + } + if self.trigger_call_info { + s.field("trigger_call_info", &true); + } + s.finish() + } +} + +#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq)] +pub enum CompletionScore { + /// If only type match + TypeMatch, + /// If type and name match + TypeAndNameMatch, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CompletionItemKind { + Snippet, + Keyword, + Module, + Function, + BuiltinType, + Struct, + Enum, + EnumVariant, + Binding, + Field, + Static, + Const, + Trait, + TypeAlias, + Method, + TypeParam, + Macro, + Attribute, + UnresolvedReference, +} + +impl CompletionItemKind { + #[cfg(test)] + pub(crate) fn tag(&self) -> &'static str { + match self { + CompletionItemKind::Attribute => "at", + CompletionItemKind::Binding => "bn", + CompletionItemKind::BuiltinType => "bt", + CompletionItemKind::Const => "ct", + CompletionItemKind::Enum => "en", + CompletionItemKind::EnumVariant => "ev", + CompletionItemKind::Field => "fd", + CompletionItemKind::Function => "fn", + CompletionItemKind::Keyword => "kw", + CompletionItemKind::Macro => "ma", + CompletionItemKind::Method => 
"me", + CompletionItemKind::Module => "md", + CompletionItemKind::Snippet => "sn", + CompletionItemKind::Static => "sc", + CompletionItemKind::Struct => "st", + CompletionItemKind::Trait => "tt", + CompletionItemKind::TypeAlias => "ta", + CompletionItemKind::TypeParam => "tp", + CompletionItemKind::UnresolvedReference => "??", + } + } +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub(crate) enum CompletionKind { + /// Parser-based keyword completion. + Keyword, + /// Your usual "complete all valid identifiers". + Reference, + /// "Secret sauce" completions. + Magic, + Snippet, + Postfix, + BuiltinType, + Attribute, +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum InsertTextFormat { + PlainText, + Snippet, +} + +impl CompletionItem { + pub(crate) fn new( + completion_kind: CompletionKind, + source_range: TextRange, + label: impl Into, + ) -> Builder { + let label = label.into(); + Builder { + source_range, + completion_kind, + label, + insert_text: None, + insert_text_format: InsertTextFormat::PlainText, + detail: None, + documentation: None, + lookup: None, + kind: None, + text_edit: None, + deprecated: None, + trigger_call_info: None, + score: None, + } + } + /// What user sees in pop-up in the UI. + pub fn label(&self) -> &str { + &self.label + } + pub fn source_range(&self) -> TextRange { + self.source_range + } + + pub fn insert_text_format(&self) -> InsertTextFormat { + self.insert_text_format + } + + pub fn text_edit(&self) -> &TextEdit { + &self.text_edit + } + + /// Short one-line additional information, like a type + pub fn detail(&self) -> Option<&str> { + self.detail.as_deref() + } + /// A doc-comment + pub fn documentation(&self) -> Option { + self.documentation.clone() + } + /// What string is used for filtering. 
+ pub fn lookup(&self) -> &str { + self.lookup.as_deref().unwrap_or(&self.label) + } + + pub fn kind(&self) -> Option { + self.kind + } + + pub fn deprecated(&self) -> bool { + self.deprecated + } + + pub fn score(&self) -> Option { + self.score + } + + pub fn trigger_call_info(&self) -> bool { + self.trigger_call_info + } +} + +/// A helper to make `CompletionItem`s. +#[must_use] +pub(crate) struct Builder { + source_range: TextRange, + completion_kind: CompletionKind, + label: String, + insert_text: Option, + insert_text_format: InsertTextFormat, + detail: Option, + documentation: Option, + lookup: Option, + kind: Option, + text_edit: Option, + deprecated: Option, + trigger_call_info: Option, + score: Option, +} + +impl Builder { + pub(crate) fn add_to(self, acc: &mut Completions) { + acc.add(self.build()) + } + + pub(crate) fn build(self) -> CompletionItem { + let label = self.label; + let text_edit = match self.text_edit { + Some(it) => it, + None => TextEdit::replace( + self.source_range, + self.insert_text.unwrap_or_else(|| label.clone()), + ), + }; + + CompletionItem { + source_range: self.source_range, + label, + insert_text_format: self.insert_text_format, + text_edit, + detail: self.detail, + documentation: self.documentation, + lookup: self.lookup, + kind: self.kind, + completion_kind: self.completion_kind, + deprecated: self.deprecated.unwrap_or(false), + trigger_call_info: self.trigger_call_info.unwrap_or(false), + score: self.score, + } + } + pub(crate) fn lookup_by(mut self, lookup: impl Into) -> Builder { + self.lookup = Some(lookup.into()); + self + } + pub(crate) fn label(mut self, label: impl Into) -> Builder { + self.label = label.into(); + self + } + pub(crate) fn insert_text(mut self, insert_text: impl Into) -> Builder { + self.insert_text = Some(insert_text.into()); + self + } + pub(crate) fn insert_snippet( + mut self, + _cap: SnippetCap, + snippet: impl Into, + ) -> Builder { + self.insert_text_format = InsertTextFormat::Snippet; + 
self.insert_text(snippet) + } + pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { + self.kind = Some(kind); + self + } + pub(crate) fn text_edit(mut self, edit: TextEdit) -> Builder { + self.text_edit = Some(edit); + self + } + pub(crate) fn snippet_edit(mut self, _cap: SnippetCap, edit: TextEdit) -> Builder { + self.insert_text_format = InsertTextFormat::Snippet; + self.text_edit(edit) + } + #[allow(unused)] + pub(crate) fn detail(self, detail: impl Into) -> Builder { + self.set_detail(Some(detail)) + } + pub(crate) fn set_detail(mut self, detail: Option>) -> Builder { + self.detail = detail.map(Into::into); + self + } + #[allow(unused)] + pub(crate) fn documentation(self, docs: Documentation) -> Builder { + self.set_documentation(Some(docs)) + } + pub(crate) fn set_documentation(mut self, docs: Option) -> Builder { + self.documentation = docs.map(Into::into); + self + } + pub(crate) fn set_deprecated(mut self, deprecated: bool) -> Builder { + self.deprecated = Some(deprecated); + self + } + pub(crate) fn set_score(mut self, score: CompletionScore) -> Builder { + self.score = Some(score); + self + } + pub(crate) fn trigger_call_info(mut self) -> Builder { + self.trigger_call_info = Some(true); + self + } +} + +impl<'a> Into for Builder { + fn into(self) -> CompletionItem { + self.build() + } +} + +/// Represents an in-progress set of completions being built. 
+#[derive(Debug, Default)] +pub(crate) struct Completions { + buf: Vec, +} + +impl Completions { + pub(crate) fn add(&mut self, item: impl Into) { + self.buf.push(item.into()) + } + pub(crate) fn add_all(&mut self, items: I) + where + I: IntoIterator, + I::Item: Into, + { + items.into_iter().for_each(|item| self.add(item.into())) + } +} + +impl Into> for Completions { + fn into(self) -> Vec { + self.buf + } +} diff --git a/crates/ide/src/completion/patterns.rs b/crates/ide/src/completion/patterns.rs new file mode 100644 index 000000000..ffc97c076 --- /dev/null +++ b/crates/ide/src/completion/patterns.rs @@ -0,0 +1,194 @@ +//! Patterns telling us certain facts about current syntax element, they are used in completion context + +use syntax::{ + algo::non_trivia_sibling, + ast::{self, LoopBodyOwner}, + match_ast, AstNode, Direction, NodeOrToken, SyntaxElement, + SyntaxKind::*, + SyntaxNode, SyntaxToken, +}; + +#[cfg(test)] +use crate::completion::test_utils::check_pattern_is_applicable; + +pub(crate) fn has_trait_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element) + .filter(|it| it.kind() == ASSOC_ITEM_LIST) + .and_then(|it| it.parent()) + .filter(|it| it.kind() == TRAIT) + .is_some() +} +#[test] +fn test_has_trait_parent() { + check_pattern_is_applicable(r"trait A { f<|> }", has_trait_parent); +} + +pub(crate) fn has_impl_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element) + .filter(|it| it.kind() == ASSOC_ITEM_LIST) + .and_then(|it| it.parent()) + .filter(|it| it.kind() == IMPL) + .is_some() +} +#[test] +fn test_has_impl_parent() { + check_pattern_is_applicable(r"impl A { f<|> }", has_impl_parent); +} + +pub(crate) fn has_block_expr_parent(element: SyntaxElement) -> bool { + not_same_range_ancestor(element).filter(|it| it.kind() == BLOCK_EXPR).is_some() +} +#[test] +fn test_has_block_expr_parent() { + check_pattern_is_applicable(r"fn my_fn() { let a = 2; f<|> }", has_block_expr_parent); +} + +pub(crate) fn 
has_bind_pat_parent(element: SyntaxElement) -> bool {
    // `any` instead of `find(..).is_some()` (clippy: search_is_some).
    element.ancestors().any(|it| it.kind() == IDENT_PAT)
}
#[test]
fn test_has_bind_pat_parent() {
    check_pattern_is_applicable(r"fn my_fn(m<|>) {}", has_bind_pat_parent);
    check_pattern_is_applicable(r"fn my_fn() { let m<|> }", has_bind_pat_parent);
}

/// True when the nearest different-range ancestor is a `&`-pattern or
/// `&`-expression.
pub(crate) fn has_ref_parent(element: SyntaxElement) -> bool {
    not_same_range_ancestor(element)
        .filter(|it| it.kind() == REF_PAT || it.kind() == REF_EXPR)
        .is_some()
}
#[test]
fn test_has_ref_parent() {
    check_pattern_is_applicable(r"fn my_fn(&m<|>) {}", has_ref_parent);
    check_pattern_is_applicable(r"fn my() { let &m<|> }", has_ref_parent);
}

/// True at the top level of a source file or of an item list (`mod { .. }`).
pub(crate) fn has_item_list_or_source_file_parent(element: SyntaxElement) -> bool {
    let ancestor = not_same_range_ancestor(element);
    // No different-range ancestor at all: treat as top level.
    if ancestor.is_none() {
        return true;
    }
    ancestor.filter(|it| it.kind() == SOURCE_FILE || it.kind() == ITEM_LIST).is_some()
}
#[test]
fn test_has_item_list_or_source_file_parent() {
    check_pattern_is_applicable(r"i<|>", has_item_list_or_source_file_parent);
    check_pattern_is_applicable(r"mod foo { f<|> }", has_item_list_or_source_file_parent);
}

/// True when the element sits inside a match arm, directly after the `=>`.
pub(crate) fn is_match_arm(element: SyntaxElement) -> bool {
    not_same_range_ancestor(element.clone()).filter(|it| it.kind() == MATCH_ARM).is_some()
        && previous_sibling_or_ancestor_sibling(element)
            .and_then(|it| it.into_token())
            .filter(|it| it.kind() == FAT_ARROW)
            .is_some()
}
#[test]
fn test_is_match_arm() {
    check_pattern_is_applicable(r"fn my_fn() { match () { () => m<|> } }", is_match_arm);
}

/// True when the previous non-trivia token is the `unsafe` keyword.
pub(crate) fn unsafe_is_prev(element: SyntaxElement) -> bool {
    element
        .into_token()
        .and_then(previous_non_trivia_token)
        .filter(|it| it.kind() == UNSAFE_KW)
        .is_some()
}
#[test]
fn test_unsafe_is_prev() {
    check_pattern_is_applicable(r"unsafe i<|>", unsafe_is_prev);
}

/// True when the previous non-trivia token is the `if` keyword.
pub(crate) fn if_is_prev(element: SyntaxElement) -> bool {
    element
        .into_token()
.and_then(|it| previous_non_trivia_token(it)) + .filter(|it| it.kind() == IF_KW) + .is_some() +} +#[test] +fn test_if_is_prev() { + check_pattern_is_applicable(r"if l<|>", if_is_prev); +} + +pub(crate) fn has_trait_as_prev_sibling(element: SyntaxElement) -> bool { + previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == TRAIT).is_some() +} +#[test] +fn test_has_trait_as_prev_sibling() { + check_pattern_is_applicable(r"trait A w<|> {}", has_trait_as_prev_sibling); +} + +pub(crate) fn has_impl_as_prev_sibling(element: SyntaxElement) -> bool { + previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == IMPL).is_some() +} +#[test] +fn test_has_impl_as_prev_sibling() { + check_pattern_is_applicable(r"impl A w<|> {}", has_impl_as_prev_sibling); +} + +pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool { + let leaf = match element { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), + }; + for node in leaf.ancestors() { + if node.kind() == FN || node.kind() == CLOSURE_EXPR { + break; + } + let loop_body = match_ast! 
{
            match node {
                ast::ForExpr(it) => it.loop_body(),
                ast::WhileExpr(it) => it.loop_body(),
                ast::LoopExpr(it) => it.loop_body(),
                _ => None,
            }
        };
        if let Some(body) = loop_body {
            if body.syntax().text_range().contains_range(leaf.text_range()) {
                return true;
            }
        }
    }
    false
}

/// Walks the chain of same-range ancestors of `element` and returns the
/// parent of the outermost one, i.e. the first ancestor whose text range
/// actually differs from the element's own.
fn not_same_range_ancestor(element: SyntaxElement) -> Option<SyntaxNode> {
    element
        .ancestors()
        .take_while(|it| it.text_range() == element.text_range())
        .last()
        .and_then(|it| it.parent())
}

/// First preceding token that is not trivia (whitespace or comments).
fn previous_non_trivia_token(token: SyntaxToken) -> Option<SyntaxToken> {
    let mut token = token.prev_token();
    // No `.clone()` needed: `token` is reassigned before the next loop check.
    while let Some(inner) = token {
        if !inner.kind().is_trivia() {
            return Some(inner);
        }
        token = inner.prev_token();
    }
    None
}

/// Previous non-trivia sibling of `element`, or — when the element has none —
/// the previous sibling of the first ancestor that does have one.
fn previous_sibling_or_ancestor_sibling(element: SyntaxElement) -> Option<SyntaxElement> {
    if let Some(sibling) = non_trivia_sibling(element.clone(), Direction::Prev) {
        return Some(sibling);
    }
    // If not, try to find the first ancestor which has such a sibling and
    // take that ancestor's previous sibling instead.
    let node = match element {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };
    let range = node.text_range();
    let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?;
    let prev_sibling_node = top_node.ancestors().find(|it| {
        non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
    })?;
    non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev)
}
diff --git a/crates/ide/src/completion/presentation.rs b/crates/ide/src/completion/presentation.rs
new file mode 100644
index 000000000..e1b1ea4ce
--- /dev/null
+++ b/crates/ide/src/completion/presentation.rs
@@ -0,0 +1,1229 @@
//! This module takes care of rendering various definitions as completion items.
//! It also handles scoring (sorting) completions.
+ +use hir::{Docs, HasAttrs, HasSource, HirDisplay, ModPath, ScopeDef, StructKind, Type}; +use itertools::Itertools; +use syntax::ast::NameOwner; +use test_utils::mark; + +use crate::{ + completion::{ + completion_item::Builder, CompletionContext, CompletionItem, CompletionItemKind, + CompletionKind, Completions, + }, + display::{const_label, function_declaration, macro_label, type_label}, + CompletionScore, RootDatabase, +}; + +impl Completions { + pub(crate) fn add_field(&mut self, ctx: &CompletionContext, field: hir::Field, ty: &Type) { + let is_deprecated = is_deprecated(field, ctx.db); + let name = field.name(ctx.db); + let mut completion_item = + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.to_string()) + .kind(CompletionItemKind::Field) + .detail(ty.display(ctx.db).to_string()) + .set_documentation(field.docs(ctx.db)) + .set_deprecated(is_deprecated); + + if let Some(score) = compute_score(ctx, &ty, &name.to_string()) { + completion_item = completion_item.set_score(score); + } + + completion_item.add_to(self); + } + + pub(crate) fn add_tuple_field(&mut self, ctx: &CompletionContext, field: usize, ty: &Type) { + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), field.to_string()) + .kind(CompletionItemKind::Field) + .detail(ty.display(ctx.db).to_string()) + .add_to(self); + } + + pub(crate) fn add_resolution( + &mut self, + ctx: &CompletionContext, + local_name: String, + resolution: &ScopeDef, + ) { + use hir::ModuleDef::*; + + let completion_kind = match resolution { + ScopeDef::ModuleDef(BuiltinType(..)) => CompletionKind::BuiltinType, + _ => CompletionKind::Reference, + }; + + let kind = match resolution { + ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::Module, + ScopeDef::ModuleDef(Function(func)) => { + return self.add_function(ctx, *func, Some(local_name)); + } + ScopeDef::ModuleDef(Adt(hir::Adt::Struct(_))) => CompletionItemKind::Struct, + // FIXME: add CompletionItemKind::Union + 
ScopeDef::ModuleDef(Adt(hir::Adt::Union(_))) => CompletionItemKind::Struct, + ScopeDef::ModuleDef(Adt(hir::Adt::Enum(_))) => CompletionItemKind::Enum, + + ScopeDef::ModuleDef(EnumVariant(var)) => { + return self.add_enum_variant(ctx, *var, Some(local_name)); + } + ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::Const, + ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::Static, + ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::Trait, + ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::TypeAlias, + ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType, + ScopeDef::GenericParam(..) => CompletionItemKind::TypeParam, + ScopeDef::Local(..) => CompletionItemKind::Binding, + // (does this need its own kind?) + ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => CompletionItemKind::TypeParam, + ScopeDef::MacroDef(mac) => { + return self.add_macro(ctx, Some(local_name), *mac); + } + ScopeDef::Unknown => { + return self.add( + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), local_name) + .kind(CompletionItemKind::UnresolvedReference), + ); + } + }; + + let docs = match resolution { + ScopeDef::ModuleDef(Module(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Adt(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(EnumVariant(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Const(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Static(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Trait(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(ctx.db), + _ => None, + }; + + let mut completion_item = + CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone()); + if let ScopeDef::Local(local) = resolution { + let ty = local.ty(ctx.db); + if !ty.is_unknown() { + completion_item = completion_item.detail(ty.display(ctx.db).to_string()); + } + }; + + if let ScopeDef::Local(local) = resolution { + if let Some(score) = compute_score(ctx, &local.ty(ctx.db), &local_name) { + completion_item = 
completion_item.set_score(score); + } + } + + // Add `<>` for generic types + if ctx.is_path_type && !ctx.has_type_args && ctx.config.add_call_parenthesis { + if let Some(cap) = ctx.config.snippet_cap { + let has_non_default_type_params = match resolution { + ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db), + ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db), + _ => false, + }; + if has_non_default_type_params { + mark::hit!(inserts_angle_brackets_for_generics); + completion_item = completion_item + .lookup_by(local_name.clone()) + .label(format!("{}<…>", local_name)) + .insert_snippet(cap, format!("{}<$0>", local_name)); + } + } + } + + completion_item.kind(kind).set_documentation(docs).add_to(self) + } + + pub(crate) fn add_macro( + &mut self, + ctx: &CompletionContext, + name: Option, + macro_: hir::MacroDef, + ) { + // FIXME: Currently proc-macro do not have ast-node, + // such that it does not have source + if macro_.is_proc_macro() { + return; + } + + let name = match name { + Some(it) => it, + None => return, + }; + + let ast_node = macro_.source(ctx.db).value; + let detail = macro_label(&ast_node); + + let docs = macro_.docs(ctx.db); + + let mut builder = CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + &format!("{}!", name), + ) + .kind(CompletionItemKind::Macro) + .set_documentation(docs.clone()) + .set_deprecated(is_deprecated(macro_, ctx.db)) + .detail(detail); + + let needs_bang = ctx.use_item_syntax.is_none() && !ctx.is_macro_call; + builder = match ctx.config.snippet_cap { + Some(cap) if needs_bang => { + let docs = docs.as_ref().map_or("", |s| s.as_str()); + let (bra, ket) = guess_macro_braces(&name, docs); + builder + .insert_snippet(cap, format!("{}!{}$0{}", name, bra, ket)) + .label(format!("{}!{}…{}", name, bra, ket)) + .lookup_by(format!("{}!", name)) + } + None if needs_bang => builder.insert_text(format!("{}!", name)), + _ => { + 
mark::hit!(dont_insert_macro_call_parens_unncessary); + builder.insert_text(name) + } + }; + + self.add(builder); + } + + pub(crate) fn add_function( + &mut self, + ctx: &CompletionContext, + func: hir::Function, + local_name: Option, + ) { + let has_self_param = func.has_self_param(ctx.db); + + let name = local_name.unwrap_or_else(|| func.name(ctx.db).to_string()); + let ast_node = func.source(ctx.db).value; + + let mut builder = + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) + .kind(if has_self_param { + CompletionItemKind::Method + } else { + CompletionItemKind::Function + }) + .set_documentation(func.docs(ctx.db)) + .set_deprecated(is_deprecated(func, ctx.db)) + .detail(function_declaration(&ast_node)); + + let params = ast_node + .param_list() + .into_iter() + .flat_map(|it| it.params()) + .flat_map(|it| it.pat()) + .map(|pat| pat.to_string().trim_start_matches('_').into()) + .collect(); + + builder = builder.add_call_parens(ctx, name, Params::Named(params)); + + self.add(builder) + } + + pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) { + let ast_node = constant.source(ctx.db).value; + let name = match ast_node.name() { + Some(name) => name, + _ => return, + }; + let detail = const_label(&ast_node); + + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) + .kind(CompletionItemKind::Const) + .set_documentation(constant.docs(ctx.db)) + .set_deprecated(is_deprecated(constant, ctx.db)) + .detail(detail) + .add_to(self); + } + + pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) { + let type_def = type_alias.source(ctx.db).value; + let name = match type_def.name() { + Some(name) => name, + _ => return, + }; + let detail = type_label(&type_def); + + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) + .kind(CompletionItemKind::TypeAlias) + 
.set_documentation(type_alias.docs(ctx.db)) + .set_deprecated(is_deprecated(type_alias, ctx.db)) + .detail(detail) + .add_to(self); + } + + pub(crate) fn add_qualified_enum_variant( + &mut self, + ctx: &CompletionContext, + variant: hir::EnumVariant, + path: ModPath, + ) { + self.add_enum_variant_impl(ctx, variant, None, Some(path)) + } + + pub(crate) fn add_enum_variant( + &mut self, + ctx: &CompletionContext, + variant: hir::EnumVariant, + local_name: Option, + ) { + self.add_enum_variant_impl(ctx, variant, local_name, None) + } + + fn add_enum_variant_impl( + &mut self, + ctx: &CompletionContext, + variant: hir::EnumVariant, + local_name: Option, + path: Option, + ) { + let is_deprecated = is_deprecated(variant, ctx.db); + let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string()); + let qualified_name = match &path { + Some(it) => it.to_string(), + None => name.to_string(), + }; + let detail_types = variant + .fields(ctx.db) + .into_iter() + .map(|field| (field.name(ctx.db), field.signature_ty(ctx.db))); + let variant_kind = variant.kind(ctx.db); + let detail = match variant_kind { + StructKind::Tuple | StructKind::Unit => format!( + "({})", + detail_types.map(|(_, t)| t.display(ctx.db).to_string()).format(", ") + ), + StructKind::Record => format!( + "{{ {} }}", + detail_types + .map(|(n, t)| format!("{}: {}", n, t.display(ctx.db).to_string())) + .format(", ") + ), + }; + let mut res = CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + qualified_name.clone(), + ) + .kind(CompletionItemKind::EnumVariant) + .set_documentation(variant.docs(ctx.db)) + .set_deprecated(is_deprecated) + .detail(detail); + + if path.is_some() { + res = res.lookup_by(name); + } + + if variant_kind == StructKind::Tuple { + mark::hit!(inserts_parens_for_tuple_enums); + let params = Params::Anonymous(variant.fields(ctx.db).len()); + res = res.add_call_parens(ctx, qualified_name, params) + } + + res.add_to(self); + } +} + +pub(crate) fn compute_score( 
+ ctx: &CompletionContext, + ty: &Type, + name: &str, +) -> Option { + let (active_name, active_type) = if let Some(record_field) = &ctx.record_field_syntax { + mark::hit!(record_field_type_match); + let (struct_field, _local) = ctx.sema.resolve_record_field(record_field)?; + (struct_field.name(ctx.db).to_string(), struct_field.signature_ty(ctx.db)) + } else if let Some(active_parameter) = &ctx.active_parameter { + mark::hit!(active_param_type_match); + (active_parameter.name.clone(), active_parameter.ty.clone()) + } else { + return None; + }; + + // Compute score + // For the same type + if &active_type != ty { + return None; + } + + let mut res = CompletionScore::TypeMatch; + + // If same type + same name then go top position + if active_name == name { + res = CompletionScore::TypeAndNameMatch + } + + Some(res) +} + +enum Params { + Named(Vec), + Anonymous(usize), +} + +impl Params { + fn len(&self) -> usize { + match self { + Params::Named(xs) => xs.len(), + Params::Anonymous(len) => *len, + } + } + + fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl Builder { + fn add_call_parens(mut self, ctx: &CompletionContext, name: String, params: Params) -> Builder { + if !ctx.config.add_call_parenthesis { + return self; + } + if ctx.use_item_syntax.is_some() { + mark::hit!(no_parens_in_use_item); + return self; + } + if ctx.is_pattern_call { + mark::hit!(dont_duplicate_pattern_parens); + return self; + } + if ctx.is_call { + return self; + } + + // Don't add parentheses if the expected type is some function reference. + if let Some(ty) = &ctx.expected_type { + if ty.is_fn() { + mark::hit!(no_call_parens_if_fn_ptr_needed); + return self; + } + } + + let cap = match ctx.config.snippet_cap { + Some(it) => it, + None => return self, + }; + // If not an import, add parenthesis automatically. 
+ mark::hit!(inserts_parens_for_function_calls); + + let (snippet, label) = if params.is_empty() { + (format!("{}()$0", name), format!("{}()", name)) + } else { + self = self.trigger_call_info(); + let snippet = match (ctx.config.add_call_argument_snippets, params) { + (true, Params::Named(params)) => { + let function_params_snippet = + params.iter().enumerate().format_with(", ", |(index, param_name), f| { + f(&format_args!("${{{}:{}}}", index + 1, param_name)) + }); + format!("{}({})$0", name, function_params_snippet) + } + _ => { + mark::hit!(suppress_arg_snippets); + format!("{}($0)", name) + } + }; + + (snippet, format!("{}(…)", name)) + }; + self.lookup_by(name).label(label).insert_snippet(cap, snippet) + } +} + +fn is_deprecated(node: impl HasAttrs, db: &RootDatabase) -> bool { + node.attrs(db).by_key("deprecated").exists() +} + +fn guess_macro_braces(macro_name: &str, docs: &str) -> (&'static str, &'static str) { + let mut votes = [0, 0, 0]; + for (idx, s) in docs.match_indices(¯o_name) { + let (before, after) = (&docs[..idx], &docs[idx + s.len()..]); + // Ensure to match the full word + if after.starts_with('!') + && !before.ends_with(|c: char| c == '_' || c.is_ascii_alphanumeric()) + { + // It may have spaces before the braces like `foo! {}` + match after[1..].chars().find(|&c| !c.is_whitespace()) { + Some('{') => votes[0] += 1, + Some('[') => votes[1] += 1, + Some('(') => votes[2] += 1, + _ => {} + } + } + } + + // Insert a space before `{}`. + // We prefer the last one when some votes equal. 
+ let (_vote, (bra, ket)) = votes + .iter() + .zip(&[(" {", "}"), ("[", "]"), ("(", ")")]) + .max_by_key(|&(&vote, _)| vote) + .unwrap(); + (*bra, *ket) +} + +#[cfg(test)] +mod tests { + use std::cmp::Reverse; + + use expect::{expect, Expect}; + use test_utils::mark; + + use crate::{ + completion::{ + test_utils::{ + check_edit, check_edit_with_config, do_completion, get_all_completion_items, + }, + CompletionConfig, CompletionKind, + }, + CompletionScore, + }; + + fn check(ra_fixture: &str, expect: Expect) { + let actual = do_completion(ra_fixture, CompletionKind::Reference); + expect.assert_debug_eq(&actual); + } + + fn check_scores(ra_fixture: &str, expect: Expect) { + fn display_score(score: Option) -> &'static str { + match score { + Some(CompletionScore::TypeMatch) => "[type]", + Some(CompletionScore::TypeAndNameMatch) => "[type+name]", + None => "[]".into(), + } + } + + let mut completions = get_all_completion_items(CompletionConfig::default(), ra_fixture); + completions.sort_by_key(|it| (Reverse(it.score()), it.label().to_string())); + let actual = completions + .into_iter() + .filter(|it| it.completion_kind == CompletionKind::Reference) + .map(|it| { + let tag = it.kind().unwrap().tag(); + let score = display_score(it.score()); + format!("{} {} {}\n", tag, it.label(), score) + }) + .collect::(); + expect.assert_eq(&actual); + } + + #[test] + fn enum_detail_includes_record_fields() { + check( + r#" +enum Foo { Foo { x: i32, y: i32 } } + +fn main() { Foo::Fo<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "Foo", + source_range: 54..56, + delete: 54..56, + insert: "Foo", + kind: EnumVariant, + detail: "{ x: i32, y: i32 }", + }, + ] + "#]], + ); + } + + #[test] + fn enum_detail_doesnt_include_tuple_fields() { + check( + r#" +enum Foo { Foo (i32, i32) } + +fn main() { Foo::Fo<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "Foo(…)", + source_range: 46..48, + delete: 46..48, + insert: "Foo($0)", + kind: EnumVariant, + lookup: "Foo", + detail: 
"(i32, i32)", + trigger_call_info: true, + }, + ] + "#]], + ); + } + + #[test] + fn enum_detail_just_parentheses_for_unit() { + check( + r#" +enum Foo { Foo } + +fn main() { Foo::Fo<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "Foo", + source_range: 35..37, + delete: 35..37, + insert: "Foo", + kind: EnumVariant, + detail: "()", + }, + ] + "#]], + ); + } + + #[test] + fn sets_deprecated_flag_in_completion_items() { + check( + r#" +#[deprecated] +fn something_deprecated() {} +#[deprecated(since = "1.0.0")] +fn something_else_deprecated() {} + +fn main() { som<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "main()", + source_range: 121..124, + delete: 121..124, + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + CompletionItem { + label: "something_deprecated()", + source_range: 121..124, + delete: 121..124, + insert: "something_deprecated()$0", + kind: Function, + lookup: "something_deprecated", + detail: "fn something_deprecated()", + deprecated: true, + }, + CompletionItem { + label: "something_else_deprecated()", + source_range: 121..124, + delete: 121..124, + insert: "something_else_deprecated()$0", + kind: Function, + lookup: "something_else_deprecated", + detail: "fn something_else_deprecated()", + deprecated: true, + }, + ] + "#]], + ); + + check( + r#" +struct A { #[deprecated] the_field: u32 } +fn foo() { A { the<|> } } +"#, + expect![[r#" + [ + CompletionItem { + label: "the_field", + source_range: 57..60, + delete: 57..60, + insert: "the_field", + kind: Field, + detail: "u32", + deprecated: true, + }, + ] + "#]], + ); + } + + #[test] + fn renders_docs() { + check( + r#" +struct S { + /// Field docs + foo: +} +impl S { + /// Method docs + fn bar(self) { self.<|> } +}"#, + expect![[r#" + [ + CompletionItem { + label: "bar()", + source_range: 94..94, + delete: 94..94, + insert: "bar()$0", + kind: Method, + lookup: "bar", + detail: "fn bar(self)", + documentation: Documentation( + "Method docs", + ), + 
}, + CompletionItem { + label: "foo", + source_range: 94..94, + delete: 94..94, + insert: "foo", + kind: Field, + detail: "{unknown}", + documentation: Documentation( + "Field docs", + ), + }, + ] + "#]], + ); + + check( + r#" +use self::my<|>; + +/// mod docs +mod my { } + +/// enum docs +enum E { + /// variant docs + V +} +use self::E::*; +"#, + expect![[r#" + [ + CompletionItem { + label: "E", + source_range: 10..12, + delete: 10..12, + insert: "E", + kind: Enum, + documentation: Documentation( + "enum docs", + ), + }, + CompletionItem { + label: "V", + source_range: 10..12, + delete: 10..12, + insert: "V", + kind: EnumVariant, + detail: "()", + documentation: Documentation( + "variant docs", + ), + }, + CompletionItem { + label: "my", + source_range: 10..12, + delete: 10..12, + insert: "my", + kind: Module, + documentation: Documentation( + "mod docs", + ), + }, + ] + "#]], + ) + } + + #[test] + fn dont_render_attrs() { + check( + r#" +struct S; +impl S { + #[inline] + fn the_method(&self) { } +} +fn foo(s: S) { s.<|> } +"#, + expect![[r#" + [ + CompletionItem { + label: "the_method()", + source_range: 81..81, + delete: 81..81, + insert: "the_method()$0", + kind: Method, + lookup: "the_method", + detail: "fn the_method(&self)", + }, + ] + "#]], + ) + } + + #[test] + fn inserts_parens_for_function_calls() { + mark::check!(inserts_parens_for_function_calls); + check_edit( + "no_args", + r#" +fn no_args() {} +fn main() { no_<|> } +"#, + r#" +fn no_args() {} +fn main() { no_args()$0 } +"#, + ); + + check_edit( + "with_args", + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_<|> } +"#, + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_args(${1:x}, ${2:y})$0 } +"#, + ); + + check_edit( + "foo", + r#" +struct S; +impl S { + fn foo(&self) {} +} +fn bar(s: &S) { s.f<|> } +"#, + r#" +struct S; +impl S { + fn foo(&self) {} +} +fn bar(s: &S) { s.foo()$0 } +"#, + ); + + check_edit( + "foo", + r#" +struct S {} +impl S { + fn foo(&self, x: i32) {} +} +fn 
bar(s: &S) { + s.f<|> +} +"#, + r#" +struct S {} +impl S { + fn foo(&self, x: i32) {} +} +fn bar(s: &S) { + s.foo(${1:x})$0 +} +"#, + ); + } + + #[test] + fn suppress_arg_snippets() { + mark::check!(suppress_arg_snippets); + check_edit_with_config( + CompletionConfig { add_call_argument_snippets: false, ..CompletionConfig::default() }, + "with_args", + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_<|> } +"#, + r#" +fn with_args(x: i32, y: String) {} +fn main() { with_args($0) } +"#, + ); + } + + #[test] + fn strips_underscores_from_args() { + check_edit( + "foo", + r#" +fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {} +fn main() { f<|> } +"#, + r#" +fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {} +fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 } +"#, + ); + } + + #[test] + fn inserts_parens_for_tuple_enums() { + mark::check!(inserts_parens_for_tuple_enums); + check_edit( + "Some", + r#" +enum Option { Some(T), None } +use Option::*; +fn main() -> Option { + Som<|> +} +"#, + r#" +enum Option { Some(T), None } +use Option::*; +fn main() -> Option { + Some($0) +} +"#, + ); + check_edit( + "Some", + r#" +enum Option { Some(T), None } +use Option::*; +fn main(value: Option) { + match value { + Som<|> + } +} +"#, + r#" +enum Option { Some(T), None } +use Option::*; +fn main(value: Option) { + match value { + Some($0) + } +} +"#, + ); + } + + #[test] + fn dont_duplicate_pattern_parens() { + mark::check!(dont_duplicate_pattern_parens); + check_edit( + "Var", + r#" +enum E { Var(i32) } +fn main() { + match E::Var(92) { + E::<|>(92) => (), + } +} +"#, + r#" +enum E { Var(i32) } +fn main() { + match E::Var(92) { + E::Var(92) => (), + } +} +"#, + ); + } + + #[test] + fn no_call_parens_if_fn_ptr_needed() { + mark::check!(no_call_parens_if_fn_ptr_needed); + check_edit( + "foo", + r#" +fn foo(foo: u8, bar: u8) {} +struct ManualVtable { f: fn(u8, u8) } + +fn main() -> ManualVtable { + ManualVtable { f: f<|> } +} +"#, + r#" +fn foo(foo: u8, bar: u8) {} +struct 
ManualVtable { f: fn(u8, u8) } + +fn main() -> ManualVtable { + ManualVtable { f: foo } +} +"#, + ); + } + + #[test] + fn no_parens_in_use_item() { + mark::check!(no_parens_in_use_item); + check_edit( + "foo", + r#" +mod m { pub fn foo() {} } +use crate::m::f<|>; +"#, + r#" +mod m { pub fn foo() {} } +use crate::m::foo; +"#, + ); + } + + #[test] + fn no_parens_in_call() { + check_edit( + "foo", + r#" +fn foo(x: i32) {} +fn main() { f<|>(); } +"#, + r#" +fn foo(x: i32) {} +fn main() { foo(); } +"#, + ); + check_edit( + "foo", + r#" +struct Foo; +impl Foo { fn foo(&self){} } +fn f(foo: &Foo) { foo.f<|>(); } +"#, + r#" +struct Foo; +impl Foo { fn foo(&self){} } +fn f(foo: &Foo) { foo.foo(); } +"#, + ); + } + + #[test] + fn inserts_angle_brackets_for_generics() { + mark::check!(inserts_angle_brackets_for_generics); + check_edit( + "Vec", + r#" +struct Vec {} +fn foo(xs: Ve<|>) +"#, + r#" +struct Vec {} +fn foo(xs: Vec<$0>) +"#, + ); + check_edit( + "Vec", + r#" +type Vec = (T,); +fn foo(xs: Ve<|>) +"#, + r#" +type Vec = (T,); +fn foo(xs: Vec<$0>) +"#, + ); + check_edit( + "Vec", + r#" +struct Vec {} +fn foo(xs: Ve<|>) +"#, + r#" +struct Vec {} +fn foo(xs: Vec) +"#, + ); + check_edit( + "Vec", + r#" +struct Vec {} +fn foo(xs: Ve<|>) +"#, + r#" +struct Vec {} +fn foo(xs: Vec) +"#, + ); + } + + #[test] + fn dont_insert_macro_call_parens_unncessary() { + mark::check!(dont_insert_macro_call_parens_unncessary); + check_edit( + "frobnicate!", + r#" +//- /main.rs +use foo::<|>; +//- /foo/lib.rs +#[macro_export] +macro_rules frobnicate { () => () } +"#, + r#" +use foo::frobnicate; +"#, + ); + + check_edit( + "frobnicate!", + r#" +macro_rules frobnicate { () => () } +fn main() { frob<|>!(); } +"#, + r#" +macro_rules frobnicate { () => () } +fn main() { frobnicate!(); } +"#, + ); + } + + #[test] + fn active_param_score() { + mark::check!(active_param_type_match); + check_scores( + r#" +struct S { foo: i64, bar: u32, baz: u32 } +fn test(bar: u32) { } +fn foo(s: S) { test(s.<|>) } 
+"#, + expect![[r#" + fd bar [type+name] + fd baz [type] + fd foo [] + "#]], + ); + } + + #[test] + fn record_field_scores() { + mark::check!(record_field_type_match); + check_scores( + r#" +struct A { foo: i64, bar: u32, baz: u32 } +struct B { x: (), y: f32, bar: u32 } +fn foo(a: A) { B { bar: a.<|> }; } +"#, + expect![[r#" + fd bar [type+name] + fd baz [type] + fd foo [] + "#]], + ) + } + + #[test] + fn record_field_and_call_scores() { + check_scores( + r#" +struct A { foo: i64, bar: u32, baz: u32 } +struct B { x: (), y: f32, bar: u32 } +fn f(foo: i64) { } +fn foo(a: A) { B { bar: f(a.<|>) }; } +"#, + expect![[r#" + fd foo [type+name] + fd bar [] + fd baz [] + "#]], + ); + check_scores( + r#" +struct A { foo: i64, bar: u32, baz: u32 } +struct B { x: (), y: f32, bar: u32 } +fn f(foo: i64) { } +fn foo(a: A) { f(B { bar: a.<|> }); } +"#, + expect![[r#" + fd bar [type+name] + fd baz [type] + fd foo [] + "#]], + ); + } + + #[test] + fn prioritize_exact_ref_match() { + check_scores( + r#" +struct WorldSnapshot { _f: () }; +fn go(world: &WorldSnapshot) { go(w<|>) } +"#, + expect![[r#" + bn world [type+name] + st WorldSnapshot [] + fn go(…) [] + "#]], + ); + } + + #[test] + fn too_many_arguments() { + mark::check!(too_many_arguments); + check_scores( + r#" +struct Foo; +fn f(foo: &Foo) { f(foo, w<|>) } +"#, + expect![[r#" + st Foo [] + fn f(…) [] + bn foo [] + "#]], + ); + } + + #[test] + fn guesses_macro_braces() { + check_edit( + "vec!", + r#" +/// Creates a [`Vec`] containing the arguments. +/// +/// ``` +/// let v = vec![1, 2, 3]; +/// assert_eq!(v[0], 1); +/// assert_eq!(v[1], 2); +/// assert_eq!(v[2], 3); +/// ``` +macro_rules! vec { () => {} } + +fn fn main() { v<|> } +"#, + r#" +/// Creates a [`Vec`] containing the arguments. +/// +/// ``` +/// let v = vec![1, 2, 3]; +/// assert_eq!(v[0], 1); +/// assert_eq!(v[1], 2); +/// assert_eq!(v[2], 3); +/// ``` +macro_rules! 
vec { () => {} } + +fn fn main() { vec![$0] } +"#, + ); + + check_edit( + "foo!", + r#" +/// Foo +/// +/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`, +/// call as `let _=foo! { hello world };` +macro_rules! foo { () => {} } +fn main() { <|> } +"#, + r#" +/// Foo +/// +/// Don't call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`, +/// call as `let _=foo! { hello world };` +macro_rules! foo { () => {} } +fn main() { foo! {$0} } +"#, + ) + } +} diff --git a/crates/ide/src/completion/test_utils.rs b/crates/ide/src/completion/test_utils.rs new file mode 100644 index 000000000..1452d7e9e --- /dev/null +++ b/crates/ide/src/completion/test_utils.rs @@ -0,0 +1,114 @@ +//! Runs completion for testing purposes. + +use hir::Semantics; +use itertools::Itertools; +use stdx::{format_to, trim_indent}; +use syntax::{AstNode, NodeOrToken, SyntaxElement}; +use test_utils::assert_eq_text; + +use crate::{ + completion::{completion_item::CompletionKind, CompletionConfig}, + mock_analysis::analysis_and_position, + CompletionItem, +}; + +pub(crate) fn do_completion(code: &str, kind: CompletionKind) -> Vec { + do_completion_with_config(CompletionConfig::default(), code, kind) +} + +pub(crate) fn do_completion_with_config( + config: CompletionConfig, + code: &str, + kind: CompletionKind, +) -> Vec { + let mut kind_completions: Vec = get_all_completion_items(config, code) + .into_iter() + .filter(|c| c.completion_kind == kind) + .collect(); + kind_completions.sort_by(|l, r| l.label().cmp(r.label())); + kind_completions +} + +pub(crate) fn completion_list(code: &str, kind: CompletionKind) -> String { + completion_list_with_config(CompletionConfig::default(), code, kind) +} + +pub(crate) fn completion_list_with_config( + config: CompletionConfig, + code: &str, + kind: CompletionKind, +) -> String { + let mut kind_completions: Vec = get_all_completion_items(config, code) + .into_iter() + .filter(|c| c.completion_kind == kind) + .collect(); + kind_completions.sort_by_key(|c| 
c.label().to_owned()); + let label_width = kind_completions + .iter() + .map(|it| monospace_width(it.label())) + .max() + .unwrap_or_default() + .min(16); + kind_completions + .into_iter() + .map(|it| { + let tag = it.kind().unwrap().tag(); + let var_name = format!("{} {}", tag, it.label()); + let mut buf = var_name; + if let Some(detail) = it.detail() { + let width = label_width.saturating_sub(monospace_width(it.label())); + format_to!(buf, "{:width$} {}", "", detail, width = width); + } + format_to!(buf, "\n"); + buf + }) + .collect() +} + +fn monospace_width(s: &str) -> usize { + s.chars().count() +} + +pub(crate) fn check_edit(what: &str, ra_fixture_before: &str, ra_fixture_after: &str) { + check_edit_with_config(CompletionConfig::default(), what, ra_fixture_before, ra_fixture_after) +} + +pub(crate) fn check_edit_with_config( + config: CompletionConfig, + what: &str, + ra_fixture_before: &str, + ra_fixture_after: &str, +) { + let ra_fixture_after = trim_indent(ra_fixture_after); + let (analysis, position) = analysis_and_position(ra_fixture_before); + let completions: Vec = + analysis.completions(&config, position).unwrap().unwrap().into(); + let (completion,) = completions + .iter() + .filter(|it| it.lookup() == what) + .collect_tuple() + .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions)); + let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); + completion.text_edit().apply(&mut actual); + assert_eq_text!(&ra_fixture_after, &actual) +} + +pub(crate) fn check_pattern_is_applicable(code: &str, check: fn(SyntaxElement) -> bool) { + let (analysis, pos) = analysis_and_position(code); + analysis + .with_db(|db| { + let sema = Semantics::new(db); + let original_file = sema.parse(pos.file_id); + let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap(); + assert!(check(NodeOrToken::Token(token))); + }) + .unwrap(); +} + +pub(crate) fn get_all_completion_items( + config: 
CompletionConfig, + code: &str, +) -> Vec { + let (analysis, position) = analysis_and_position(code); + analysis.completions(&config, position).unwrap().unwrap().into() +} diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs new file mode 100644 index 000000000..a3ec98178 --- /dev/null +++ b/crates/ide/src/diagnostics.rs @@ -0,0 +1,678 @@ +//! Collects diagnostics & fixits for a single file. +//! +//! The tricky bit here is that diagnostics are produced by hir in terms of +//! macro-expanded files, but we need to present them to the users in terms of +//! original files. So we need to map the ranges. + +use std::cell::RefCell; + +use base_db::SourceDatabase; +use hir::{diagnostics::DiagnosticSinkBuilder, Semantics}; +use ide_db::RootDatabase; +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode}, + SyntaxNode, TextRange, T, +}; +use text_edit::TextEdit; + +use crate::{Diagnostic, FileId, Fix, SourceFileEdit}; + +mod diagnostics_with_fix; +use diagnostics_with_fix::DiagnosticWithFix; + +#[derive(Debug, Copy, Clone)] +pub enum Severity { + Error, + WeakWarning, +} + +pub(crate) fn diagnostics( + db: &RootDatabase, + file_id: FileId, + enable_experimental: bool, +) -> Vec { + let _p = profile::span("diagnostics"); + let sema = Semantics::new(db); + let parse = db.parse(file_id); + let mut res = Vec::new(); + + // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. 
+ res.extend(parse.errors().iter().take(128).map(|err| Diagnostic { + range: err.range(), + message: format!("Syntax Error: {}", err), + severity: Severity::Error, + fix: None, + })); + + for node in parse.tree().syntax().descendants() { + check_unnecessary_braces_in_use_statement(&mut res, file_id, &node); + check_struct_shorthand_initialization(&mut res, file_id, &node); + } + let res = RefCell::new(res); + let mut sink = DiagnosticSinkBuilder::new() + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + .on::(|d| { + res.borrow_mut().push(diagnostic_with_fix(d, &sema)); + }) + // Only collect experimental diagnostics when they're enabled. + .filter(|diag| !diag.is_experimental() || enable_experimental) + // Diagnostics not handled above get no fix and default treatment. + .build(|d| { + res.borrow_mut().push(Diagnostic { + message: d.message(), + range: sema.diagnostics_display_range(d).range, + severity: Severity::Error, + fix: None, + }) + }); + + if let Some(m) = sema.to_module_def(file_id) { + m.diagnostics(db, &mut sink); + }; + drop(sink); + res.into_inner() +} + +fn diagnostic_with_fix(d: &D, sema: &Semantics) -> Diagnostic { + Diagnostic { + range: sema.diagnostics_display_range(d).range, + message: d.message(), + severity: Severity::Error, + fix: d.fix(&sema), + } +} + +fn check_unnecessary_braces_in_use_statement( + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, +) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(node.clone())?; + if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { + let use_range = use_tree_list.syntax().text_range(); + let edit = + text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) + .unwrap_or_else(|| { + let to_replace = single_use_tree.syntax().text().to_string(); + let mut 
edit_builder = TextEdit::builder(); + edit_builder.delete(use_range); + edit_builder.insert(use_range.start(), to_replace); + edit_builder.finish() + }); + + acc.push(Diagnostic { + range: use_range, + message: "Unnecessary braces in use statement".to_string(), + severity: Severity::WeakWarning, + fix: Some(Fix::new( + "Remove unnecessary braces", + SourceFileEdit { file_id, edit }.into(), + use_range, + )), + }); + } + + Some(()) +} + +fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( + single_use_tree: &ast::UseTree, +) -> Option { + let use_tree_list_node = single_use_tree.syntax().parent()?; + if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { + let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); + let end = use_tree_list_node.text_range().end(); + return Some(TextEdit::delete(TextRange::new(start, end))); + } + None +} + +fn check_struct_shorthand_initialization( + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, +) -> Option<()> { + let record_lit = ast::RecordExpr::cast(node.clone())?; + let record_field_list = record_lit.record_expr_field_list()?; + for record_field in record_field_list.fields() { + if let (Some(name_ref), Some(expr)) = (record_field.name_ref(), record_field.expr()) { + let field_name = name_ref.syntax().text().to_string(); + let field_expr = expr.syntax().text().to_string(); + let field_name_is_tup_index = name_ref.as_tuple_field().is_some(); + if field_name == field_expr && !field_name_is_tup_index { + let mut edit_builder = TextEdit::builder(); + edit_builder.delete(record_field.syntax().text_range()); + edit_builder.insert(record_field.syntax().text_range().start(), field_name); + let edit = edit_builder.finish(); + + let field_range = record_field.syntax().text_range(); + acc.push(Diagnostic { + range: field_range, + message: "Shorthand struct initialization".to_string(), + severity: Severity::WeakWarning, + fix: Some(Fix::new( + "Use struct 
shorthand initialization", + SourceFileEdit { file_id, edit }.into(), + field_range, + )), + }); + } + } + } + Some(()) +} + +#[cfg(test)] +mod tests { + use stdx::trim_indent; + use test_utils::assert_eq_text; + + use crate::mock_analysis::{analysis_and_position, single_file, MockAnalysis}; + use expect::{expect, Expect}; + + /// Takes a multi-file input fixture with annotated cursor positions, + /// and checks that: + /// * a diagnostic is produced + /// * this diagnostic fix trigger range touches the input cursor position + /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied + fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (analysis, file_position) = analysis_and_position(ra_fixture_before); + let diagnostic = analysis.diagnostics(file_position.file_id, true).unwrap().pop().unwrap(); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_change.source_file_edits.pop().unwrap().edit; + let target_file_contents = analysis.file_text(file_position.file_id).unwrap(); + let actual = { + let mut actual = target_file_contents.to_string(); + edit.apply(&mut actual); + actual + }; + + assert_eq_text!(&after, &actual); + assert!( + fix.fix_trigger_range.start() <= file_position.offset + && fix.fix_trigger_range.end() >= file_position.offset, + "diagnostic fix range {:?} does not touch cursor position {:?}", + fix.fix_trigger_range, + file_position.offset + ); + } + + /// Checks that a diagnostic applies to the file containing the `<|>` cursor marker + /// which has a fix that can apply to other files. 
+ fn check_apply_diagnostic_fix_in_other_file(ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = &trim_indent(ra_fixture_after); + let (analysis, file_pos) = analysis_and_position(ra_fixture_before); + let current_file_id = file_pos.file_id; + let diagnostic = analysis.diagnostics(current_file_id, true).unwrap().pop().unwrap(); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_change.source_file_edits.pop().unwrap(); + let changed_file_id = edit.file_id; + let before = analysis.file_text(changed_file_id).unwrap(); + let actual = { + let mut actual = before.to_string(); + edit.edit.apply(&mut actual); + actual + }; + assert_eq_text!(ra_fixture_after, &actual); + } + + /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics + /// apply to the file containing the cursor. + fn check_no_diagnostics(ra_fixture: &str) { + let mock = MockAnalysis::with_files(ra_fixture); + let files = mock.files().map(|(it, _)| it).collect::>(); + let analysis = mock.analysis(); + let diagnostics = files + .into_iter() + .flat_map(|file_id| analysis.diagnostics(file_id, true).unwrap()) + .collect::>(); + assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); + } + + fn check_expect(ra_fixture: &str, expect: Expect) { + let (analysis, file_id) = single_file(ra_fixture); + let diagnostics = analysis.diagnostics(file_id, true).unwrap(); + expect.assert_debug_eq(&diagnostics) + } + + #[test] + fn test_wrap_return_type() { + check_fix( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +fn div(x: i32, y: i32) -> Result { + if y == 0 { + return Err(()); + } + x / y<|> +} +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + r#" +use core::result::Result::{self, Ok, Err}; + +fn div(x: i32, y: i32) -> Result { + if y == 0 { + return Err(()); + } + Ok(x / y) +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_handles_generic_functions() { + 
check_fix( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +fn div(x: T) -> Result { + if x == 0 { + return Err(7); + } + <|>x +} +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + r#" +use core::result::Result::{self, Ok, Err}; + +fn div(x: T) -> Result { + if x == 0 { + return Err(7); + } + Ok(x) +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_handles_type_aliases() { + check_fix( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +type MyResult = Result; + +fn div(x: i32, y: i32) -> MyResult { + if y == 0 { + return Err(()); + } + x <|>/ y +} +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + r#" +use core::result::Result::{self, Ok, Err}; + +type MyResult = Result; + +fn div(x: i32, y: i32) -> MyResult { + if y == 0 { + return Err(()); + } + Ok(x / y) +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() { + check_no_diagnostics( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +fn foo() -> Result<(), i32> { 0 } + +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + ); + } + + #[test] + fn test_wrap_return_type_not_applicable_when_return_type_is_not_result() { + check_no_diagnostics( + r#" +//- /main.rs +use core::result::Result::{self, Ok, Err}; + +enum SomeOtherEnum { Ok(i32), Err(String) } + +fn foo() -> SomeOtherEnum { 0 } + +//- /core/lib.rs +pub mod result { + pub enum Result { Ok(T), Err(E) } +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_empty() { + check_fix( + r#" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct {<|>}; +} +"#, + r#" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct { one: (), two: ()}; +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_self() { + check_fix( + r#" +struct TestStruct { one: i32 } + +impl TestStruct { + fn test_fn() { let s = Self {<|>}; } 
+} +"#, + r#" +struct TestStruct { one: i32 } + +impl TestStruct { + fn test_fn() { let s = Self { one: ()}; } +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_enum() { + check_fix( + r#" +enum Expr { + Bin { lhs: Box, rhs: Box } +} + +impl Expr { + fn new_bin(lhs: Box, rhs: Box) -> Expr { + Expr::Bin {<|> } + } +} +"#, + r#" +enum Expr { + Bin { lhs: Box, rhs: Box } +} + +impl Expr { + fn new_bin(lhs: Box, rhs: Box) -> Expr { + Expr::Bin { lhs: (), rhs: () } + } +} +"#, + ); + } + + #[test] + fn test_fill_struct_fields_partial() { + check_fix( + r#" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct{ two: 2<|> }; +} +"#, + r" +struct TestStruct { one: i32, two: i64 } + +fn test_fn() { + let s = TestStruct{ two: 2, one: () }; +} +", + ); + } + + #[test] + fn test_fill_struct_fields_no_diagnostic() { + check_no_diagnostics( + r" + struct TestStruct { one: i32, two: i64 } + + fn test_fn() { + let one = 1; + let s = TestStruct{ one, two: 2 }; + } + ", + ); + } + + #[test] + fn test_fill_struct_fields_no_diagnostic_on_spread() { + check_no_diagnostics( + r" + struct TestStruct { one: i32, two: i64 } + + fn test_fn() { + let one = 1; + let s = TestStruct{ ..a }; + } + ", + ); + } + + #[test] + fn test_unresolved_module_diagnostic() { + check_expect( + r#"mod foo;"#, + expect![[r#" + [ + Diagnostic { + message: "unresolved module", + range: 0..8, + severity: Error, + fix: Some( + Fix { + label: "Create module", + source_change: SourceChange { + source_file_edits: [], + file_system_edits: [ + CreateFile { + anchor: FileId( + 1, + ), + dst: "foo.rs", + }, + ], + is_snippet: false, + }, + fix_trigger_range: 0..8, + }, + ), + }, + ] + "#]], + ); + } + + #[test] + fn range_mapping_out_of_macros() { + // FIXME: this is very wrong, but somewhat tricky to fix. + check_fix( + r#" +fn some() {} +fn items() {} +fn here() {} + +macro_rules! 
id { ($($tt:tt)*) => { $($tt)*}; } + +fn main() { + let _x = id![Foo { a: <|>42 }]; +} + +pub struct Foo { pub a: i32, pub b: i32 } +"#, + r#" +fn {a:42, b: ()} {} +fn items() {} +fn here() {} + +macro_rules! id { ($($tt:tt)*) => { $($tt)*}; } + +fn main() { + let _x = id![Foo { a: 42 }]; +} + +pub struct Foo { pub a: i32, pub b: i32 } +"#, + ); + } + + #[test] + fn test_check_unnecessary_braces_in_use_statement() { + check_no_diagnostics( + r#" +use a; +use a::{c, d::e}; +"#, + ); + check_fix(r#"use {<|>b};"#, r#"use b;"#); + check_fix(r#"use {b<|>};"#, r#"use b;"#); + check_fix(r#"use a::{c<|>};"#, r#"use a::c;"#); + check_fix(r#"use a::{self<|>};"#, r#"use a;"#); + check_fix(r#"use a::{c, d::{e<|>}};"#, r#"use a::{c, d::e};"#); + } + + #[test] + fn test_check_struct_shorthand_initialization() { + check_no_diagnostics( + r#" +struct A { a: &'static str } +fn main() { A { a: "hello" } } +"#, + ); + check_no_diagnostics( + r#" +struct A(usize); +fn main() { A { 0: 0 } } +"#, + ); + + check_fix( + r#" +struct A { a: &'static str } +fn main() { + let a = "haha"; + A { a<|>: a } +} +"#, + r#" +struct A { a: &'static str } +fn main() { + let a = "haha"; + A { a } +} +"#, + ); + + check_fix( + r#" +struct A { a: &'static str, b: &'static str } +fn main() { + let a = "haha"; + let b = "bb"; + A { a<|>: a, b } +} +"#, + r#" +struct A { a: &'static str, b: &'static str } +fn main() { + let a = "haha"; + let b = "bb"; + A { a, b } +} +"#, + ); + } + + #[test] + fn test_add_field_from_usage() { + check_fix( + r" +fn main() { + Foo { bar: 3, baz<|>: false}; +} +struct Foo { + bar: i32 +} +", + r" +fn main() { + Foo { bar: 3, baz: false}; +} +struct Foo { + bar: i32, + baz: bool +} +", + ) + } + + #[test] + fn test_add_field_in_other_file_from_usage() { + check_apply_diagnostic_fix_in_other_file( + r" + //- /main.rs + mod foo; + + fn main() { + <|>foo::Foo { bar: 3, baz: false}; + } + //- /foo.rs + struct Foo { + bar: i32 + } + ", + r" + struct Foo { + bar: i32, + pub(crate) 
baz: bool + } + ", + ) + } +} diff --git a/crates/ide/src/diagnostics/diagnostics_with_fix.rs b/crates/ide/src/diagnostics/diagnostics_with_fix.rs new file mode 100644 index 000000000..85b46c995 --- /dev/null +++ b/crates/ide/src/diagnostics/diagnostics_with_fix.rs @@ -0,0 +1,171 @@ +//! Provides a way to attach fixes to the diagnostics. +//! The same module also has all curret custom fixes for the diagnostics implemented. +use crate::Fix; +use ast::{edit::IndentLevel, make}; +use base_db::FileId; +use hir::{ + db::AstDatabase, + diagnostics::{Diagnostic, MissingFields, MissingOkInTailExpr, NoSuchField, UnresolvedModule}, + HasSource, HirDisplay, Semantics, VariantDef, +}; +use ide_db::{ + source_change::{FileSystemEdit, SourceFileEdit}, + RootDatabase, +}; +use syntax::{algo, ast, AstNode}; +use text_edit::TextEdit; + +/// A [Diagnostic] that potentially has a fix available. +/// +/// [Diagnostic]: hir::diagnostics::Diagnostic +pub trait DiagnosticWithFix: Diagnostic { + fn fix(&self, sema: &Semantics) -> Option; +} + +impl DiagnosticWithFix for UnresolvedModule { + fn fix(&self, sema: &Semantics) -> Option { + let root = sema.db.parse_or_expand(self.file)?; + let unresolved_module = self.decl.to_node(&root); + Some(Fix::new( + "Create module", + FileSystemEdit::CreateFile { + anchor: self.file.original_file(sema.db), + dst: self.candidate.clone(), + } + .into(), + unresolved_module.syntax().text_range(), + )) + } +} + +impl DiagnosticWithFix for NoSuchField { + fn fix(&self, sema: &Semantics) -> Option { + let root = sema.db.parse_or_expand(self.file)?; + missing_record_expr_field_fix( + &sema, + self.file.original_file(sema.db), + &self.field.to_node(&root), + ) + } +} + +impl DiagnosticWithFix for MissingFields { + fn fix(&self, sema: &Semantics) -> Option { + // Note that although we could add a diagnostics to + // fill the missing tuple field, e.g : + // `struct A(usize);` + // `let a = A { 0: () }` + // but it is uncommon usage and it should not be 
encouraged. + if self.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) { + return None; + } + + let root = sema.db.parse_or_expand(self.file)?; + let old_field_list = self.field_list_parent.to_node(&root).record_expr_field_list()?; + let mut new_field_list = old_field_list.clone(); + for f in self.missed_fields.iter() { + let field = + make::record_expr_field(make::name_ref(&f.to_string()), Some(make::expr_unit())); + new_field_list = new_field_list.append_field(&field); + } + + let edit = { + let mut builder = TextEdit::builder(); + algo::diff(&old_field_list.syntax(), &new_field_list.syntax()) + .into_text_edit(&mut builder); + builder.finish() + }; + Some(Fix::new( + "Fill struct fields", + SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(), + sema.original_range(&old_field_list.syntax()).range, + )) + } +} + +impl DiagnosticWithFix for MissingOkInTailExpr { + fn fix(&self, sema: &Semantics) -> Option { + let root = sema.db.parse_or_expand(self.file)?; + let tail_expr = self.expr.to_node(&root); + let tail_expr_range = tail_expr.syntax().text_range(); + let edit = TextEdit::replace(tail_expr_range, format!("Ok({})", tail_expr.syntax())); + let source_change = + SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(); + Some(Fix::new("Wrap with ok", source_change, tail_expr_range)) + } +} + +fn missing_record_expr_field_fix( + sema: &Semantics, + usage_file_id: FileId, + record_expr_field: &ast::RecordExprField, +) -> Option { + let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?; + let def_id = sema.resolve_variant(record_lit)?; + let module; + let def_file_id; + let record_fields = match VariantDef::from(def_id) { + VariantDef::Struct(s) => { + module = s.module(sema.db); + let source = s.source(sema.db); + def_file_id = source.file_id; + let fields = source.value.field_list()?; + record_field_list(fields)? 
+ } + VariantDef::Union(u) => { + module = u.module(sema.db); + let source = u.source(sema.db); + def_file_id = source.file_id; + source.value.record_field_list()? + } + VariantDef::EnumVariant(e) => { + module = e.module(sema.db); + let source = e.source(sema.db); + def_file_id = source.file_id; + let fields = source.value.field_list()?; + record_field_list(fields)? + } + }; + let def_file_id = def_file_id.original_file(sema.db); + + let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?; + if new_field_type.is_unknown() { + return None; + } + let new_field = make::record_field( + record_expr_field.field_name()?, + make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?), + ); + + let last_field = record_fields.fields().last()?; + let last_field_syntax = last_field.syntax(); + let indent = IndentLevel::from_node(last_field_syntax); + + let mut new_field = new_field.to_string(); + if usage_file_id != def_file_id { + new_field = format!("pub(crate) {}", new_field); + } + new_field = format!("\n{}{}", indent, new_field); + + let needs_comma = !last_field_syntax.to_string().ends_with(','); + if needs_comma { + new_field = format!(",{}", new_field); + } + + let source_change = SourceFileEdit { + file_id: def_file_id, + edit: TextEdit::insert(last_field_syntax.text_range().end(), new_field), + }; + return Some(Fix::new( + "Create field", + source_change.into(), + record_expr_field.syntax().text_range(), + )); + + fn record_field_list(field_def_list: ast::FieldList) -> Option { + match field_def_list { + ast::FieldList::RecordFieldList(it) => Some(it), + ast::FieldList::TupleFieldList(_) => None, + } + } +} diff --git a/crates/ide/src/display.rs b/crates/ide/src/display.rs new file mode 100644 index 000000000..41b5bdc49 --- /dev/null +++ b/crates/ide/src/display.rs @@ -0,0 +1,83 @@ +//! This module contains utilities for turning SyntaxNodes and HIR types +//! into types that may be used to render in a UI. 
+ +mod navigation_target; +mod short_label; + +use syntax::{ + ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner}, + SyntaxKind::{ATTR, COMMENT}, +}; + +use ast::VisibilityOwner; +use stdx::format_to; + +pub use navigation_target::NavigationTarget; +pub(crate) use navigation_target::{ToNav, TryToNav}; +pub(crate) use short_label::ShortLabel; + +pub(crate) fn function_declaration(node: &ast::Fn) -> String { + let mut buf = String::new(); + if let Some(vis) = node.visibility() { + format_to!(buf, "{} ", vis); + } + if node.async_token().is_some() { + format_to!(buf, "async "); + } + if node.const_token().is_some() { + format_to!(buf, "const "); + } + if node.unsafe_token().is_some() { + format_to!(buf, "unsafe "); + } + if let Some(abi) = node.abi() { + // Keyword `extern` is included in the string. + format_to!(buf, "{} ", abi); + } + if let Some(name) = node.name() { + format_to!(buf, "fn {}", name) + } + if let Some(type_params) = node.generic_param_list() { + format_to!(buf, "{}", type_params); + } + if let Some(param_list) = node.param_list() { + format_to!(buf, "{}", param_list); + } + if let Some(ret_type) = node.ret_type() { + if ret_type.ty().is_some() { + format_to!(buf, " {}", ret_type); + } + } + if let Some(where_clause) = node.where_clause() { + format_to!(buf, "\n{}", where_clause); + } + buf +} + +pub(crate) fn const_label(node: &ast::Const) -> String { + let label: String = node + .syntax() + .children_with_tokens() + .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) + .map(|node| node.to_string()) + .collect(); + + label.trim().to_owned() +} + +pub(crate) fn type_label(node: &ast::TypeAlias) -> String { + let label: String = node + .syntax() + .children_with_tokens() + .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) + .map(|node| node.to_string()) + .collect(); + + label.trim().to_owned() +} + +pub(crate) fn macro_label(node: &ast::MacroCall) -> String { + let name = node.name().map(|name| 
name.syntax().text().to_string()).unwrap_or_default(); + let vis = if node.has_atom_attr("macro_export") { "#[macro_export]\n" } else { "" }; + format!("{}macro_rules! {}", vis, name) +} diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs new file mode 100644 index 000000000..e77106177 --- /dev/null +++ b/crates/ide/src/display/navigation_target.rs @@ -0,0 +1,491 @@ +//! FIXME: write short doc here + +use base_db::{FileId, SourceDatabase}; +use either::Either; +use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource}; +use ide_db::{defs::Definition, RootDatabase}; +use syntax::{ + ast::{self, DocCommentsOwner, NameOwner}, + match_ast, AstNode, SmolStr, + SyntaxKind::{self, IDENT_PAT, TYPE_PARAM}, + TextRange, +}; + +use crate::FileSymbol; + +use super::short_label::ShortLabel; + +/// `NavigationTarget` represents and element in the editor's UI which you can +/// click on to navigate to a particular piece of code. +/// +/// Typically, a `NavigationTarget` corresponds to some element in the source +/// code, like a function or a struct, but this is not strictly required. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NavigationTarget { + pub file_id: FileId, + /// Range which encompasses the whole element. + /// + /// Should include body, doc comments, attributes, etc. + /// + /// Clients should use this range to answer "is the cursor inside the + /// element?" question. + pub full_range: TextRange, + /// A "most interesting" range withing the `full_range`. + /// + /// Typically, `full_range` is the whole syntax node, including doc + /// comments, and `focus_range` is the range of the identifier. "Most + /// interesting" range within the full range, typically the range of + /// identifier. + /// + /// Clients should place the cursor on this range when navigating to this target. 
+ pub focus_range: Option, + pub name: SmolStr, + pub kind: SyntaxKind, + pub container_name: Option, + pub description: Option, + pub docs: Option, +} + +pub(crate) trait ToNav { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; +} + +pub(crate) trait TryToNav { + fn try_to_nav(&self, db: &RootDatabase) -> Option; +} + +impl NavigationTarget { + pub fn focus_or_full_range(&self) -> TextRange { + self.focus_range.unwrap_or(self.full_range) + } + + pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { + let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); + if let Some(src) = module.declaration_source(db) { + let frange = original_range(db, src.as_ref().map(|it| it.syntax())); + let mut res = NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + src.value.syntax().kind(), + ); + res.docs = src.value.doc_comment_text(); + res.description = src.value.short_label(); + return res; + } + module.to_nav(db) + } + + #[cfg(test)] + pub(crate) fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + + #[cfg(test)] + pub(crate) fn debug_render(&self) -> String { + let mut buf = + format!("{} {:?} {:?} {:?}", self.name, self.kind, self.file_id, self.full_range); + if let Some(focus_range) = self.focus_range { + buf.push_str(&format!(" {:?}", focus_range)) + } + if let Some(container_name) = &self.container_name { + buf.push_str(&format!(" {}", container_name)) + } + buf + } + + /// Allows `NavigationTarget` to be created from a `NameOwner` + pub(crate) fn from_named( + db: &RootDatabase, + node: InFile<&dyn ast::NameOwner>, + ) -> NavigationTarget { + let name = + node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); + let focus_range = + node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); + let frange = original_range(db, 
node.map(|it| it.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + focus_range, + frange.range, + node.value.syntax().kind(), + ) + } + + /// Allows `NavigationTarget` to be created from a `DocCommentsOwner` and a `NameOwner` + pub(crate) fn from_doc_commented( + db: &RootDatabase, + named: InFile<&dyn ast::NameOwner>, + node: InFile<&dyn ast::DocCommentsOwner>, + ) -> NavigationTarget { + let name = + named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); + let frange = original_range(db, node.map(|it| it.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + node.value.syntax().kind(), + ) + } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + ) -> NavigationTarget { + NavigationTarget { + file_id, + name, + kind, + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + } +} + +impl ToNav for FileSymbol { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + NavigationTarget { + file_id: self.file_id, + name: self.name.clone(), + kind: self.kind, + full_range: self.range, + focus_range: self.name_range, + container_name: self.container_name.clone(), + description: description_from_symbol(db, self), + docs: docs_from_symbol(db, self), + } + } +} + +impl TryToNav for Definition { + fn try_to_nav(&self, db: &RootDatabase) -> Option { + match self { + Definition::Macro(it) => Some(it.to_nav(db)), + Definition::Field(it) => Some(it.to_nav(db)), + Definition::ModuleDef(it) => it.try_to_nav(db), + Definition::SelfType(it) => Some(it.to_nav(db)), + Definition::Local(it) => Some(it.to_nav(db)), + Definition::TypeParam(it) => Some(it.to_nav(db)), + } + } +} + +impl TryToNav for hir::ModuleDef { + fn try_to_nav(&self, db: &RootDatabase) -> Option { + let res = match self { + hir::ModuleDef::Module(it) => it.to_nav(db), + hir::ModuleDef::Function(it) => 
it.to_nav(db), + hir::ModuleDef::Adt(it) => it.to_nav(db), + hir::ModuleDef::EnumVariant(it) => it.to_nav(db), + hir::ModuleDef::Const(it) => it.to_nav(db), + hir::ModuleDef::Static(it) => it.to_nav(db), + hir::ModuleDef::Trait(it) => it.to_nav(db), + hir::ModuleDef::TypeAlias(it) => it.to_nav(db), + hir::ModuleDef::BuiltinType(_) => return None, + }; + Some(res) + } +} + +pub(crate) trait ToNavFromAst {} +impl ToNavFromAst for hir::Function {} +impl ToNavFromAst for hir::Const {} +impl ToNavFromAst for hir::Static {} +impl ToNavFromAst for hir::Struct {} +impl ToNavFromAst for hir::Enum {} +impl ToNavFromAst for hir::EnumVariant {} +impl ToNavFromAst for hir::Union {} +impl ToNavFromAst for hir::TypeAlias {} +impl ToNavFromAst for hir::Trait {} + +impl ToNav for D +where + D: HasSource + ToNavFromAst + Copy, + D::Ast: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, +{ + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let mut res = + NavigationTarget::from_named(db, src.as_ref().map(|it| it as &dyn ast::NameOwner)); + res.docs = src.value.doc_comment_text(); + res.description = src.value.short_label(); + res + } +} + +impl ToNav for hir::Module { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.definition_source(db); + let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default(); + let (syntax, focus) = match &src.value { + ModuleSource::SourceFile(node) => (node.syntax(), None), + ModuleSource::Module(node) => { + (node.syntax(), node.name().map(|it| it.syntax().text_range())) + } + }; + let frange = original_range(db, src.with_value(syntax)); + NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind()) + } +} + +impl ToNav for hir::ImplDef { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let derive_attr = self.is_builtin_derive(db); + let frange = if let Some(item) = &derive_attr { + original_range(db, 
item.syntax()) + } else { + original_range(db, src.as_ref().map(|it| it.syntax())) + }; + let focus_range = if derive_attr.is_some() { + None + } else { + src.value.self_ty().map(|ty| original_range(db, src.with_value(ty.syntax())).range) + }; + + NavigationTarget::from_syntax( + frange.file_id, + "impl".into(), + focus_range, + frange.range, + src.value.syntax().kind(), + ) + } +} + +impl ToNav for hir::Field { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + + match &src.value { + FieldSource::Named(it) => { + let mut res = NavigationTarget::from_named(db, src.with_value(it)); + res.docs = it.doc_comment_text(); + res.description = it.short_label(); + res + } + FieldSource::Pos(it) => { + let frange = original_range(db, src.with_value(it.syntax())); + NavigationTarget::from_syntax( + frange.file_id, + "".into(), + None, + frange.range, + it.syntax().kind(), + ) + } + } + } +} + +impl ToNav for hir::MacroDef { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + log::debug!("nav target {:#?}", src.value.syntax()); + let mut res = + NavigationTarget::from_named(db, src.as_ref().map(|it| it as &dyn ast::NameOwner)); + res.docs = src.value.doc_comment_text(); + res + } +} + +impl ToNav for hir::Adt { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + match self { + hir::Adt::Struct(it) => it.to_nav(db), + hir::Adt::Union(it) => it.to_nav(db), + hir::Adt::Enum(it) => it.to_nav(db), + } + } +} + +impl ToNav for hir::AssocItem { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + match self { + AssocItem::Function(it) => it.to_nav(db), + AssocItem::Const(it) => it.to_nav(db), + AssocItem::TypeAlias(it) => it.to_nav(db), + } + } +} + +impl ToNav for hir::Local { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let node = match &src.value { + Either::Left(bind_pat) => { + bind_pat.name().map_or_else(|| bind_pat.syntax().clone(), 
|it| it.syntax().clone()) + } + Either::Right(it) => it.syntax().clone(), + }; + let full_range = original_range(db, src.with_value(&node)); + let name = match self.name(db) { + Some(it) => it.to_string().into(), + None => "".into(), + }; + NavigationTarget { + file_id: full_range.file_id, + name, + kind: IDENT_PAT, + full_range: full_range.range, + focus_range: None, + container_name: None, + description: None, + docs: None, + } + } +} + +impl ToNav for hir::TypeParam { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let full_range = match &src.value { + Either::Left(it) => it.syntax().text_range(), + Either::Right(it) => it.syntax().text_range(), + }; + let focus_range = match &src.value { + Either::Left(_) => None, + Either::Right(it) => it.name().map(|it| it.syntax().text_range()), + }; + NavigationTarget { + file_id: src.file_id.original_file(db), + name: self.name(db).to_string().into(), + kind: TYPE_PARAM, + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + } +} + +pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { + let parse = db.parse(symbol.file_id); + let node = symbol.ptr.to_node(parse.tree().syntax()); + + match_ast! { + match node { + ast::Fn(it) => it.doc_comment_text(), + ast::Struct(it) => it.doc_comment_text(), + ast::Enum(it) => it.doc_comment_text(), + ast::Trait(it) => it.doc_comment_text(), + ast::Module(it) => it.doc_comment_text(), + ast::TypeAlias(it) => it.doc_comment_text(), + ast::Const(it) => it.doc_comment_text(), + ast::Static(it) => it.doc_comment_text(), + ast::RecordField(it) => it.doc_comment_text(), + ast::Variant(it) => it.doc_comment_text(), + ast::MacroCall(it) => it.doc_comment_text(), + _ => None, + } + } +} + +/// Get a description of a symbol. +/// +/// e.g. 
`struct Name`, `enum Name`, `fn Name` +pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { + let parse = db.parse(symbol.file_id); + let node = symbol.ptr.to_node(parse.tree().syntax()); + + match_ast! { + match node { + ast::Fn(it) => it.short_label(), + ast::Struct(it) => it.short_label(), + ast::Enum(it) => it.short_label(), + ast::Trait(it) => it.short_label(), + ast::Module(it) => it.short_label(), + ast::TypeAlias(it) => it.short_label(), + ast::Const(it) => it.short_label(), + ast::Static(it) => it.short_label(), + ast::RecordField(it) => it.short_label(), + ast::Variant(it) => it.short_label(), + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use expect::expect; + + use crate::{mock_analysis::single_file, Query}; + + #[test] + fn test_nav_for_symbol() { + let (analysis, _) = single_file( + r#" +enum FooInner { } +fn foo() { enum FooInner { } } +"#, + ); + + let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap(); + expect![[r#" + [ + NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..17, + focus_range: Some( + 5..13, + ), + name: "FooInner", + kind: ENUM, + container_name: None, + description: Some( + "enum FooInner", + ), + docs: None, + }, + NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 29..46, + focus_range: Some( + 34..42, + ), + name: "FooInner", + kind: ENUM, + container_name: Some( + "foo", + ), + description: Some( + "enum FooInner", + ), + docs: None, + }, + ] + "#]] + .assert_debug_eq(&navs); + } + + #[test] + fn test_world_symbols_are_case_sensitive() { + let (analysis, _) = single_file( + r#" +fn foo() {} +struct Foo; +"#, + ); + + let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap(); + assert_eq!(navs.len(), 2) + } +} diff --git a/crates/ide/src/display/short_label.rs b/crates/ide/src/display/short_label.rs new file mode 100644 index 000000000..ea49d9f97 --- /dev/null +++ b/crates/ide/src/display/short_label.rs @@ -0,0 +1,111 @@ 
+//! FIXME: write short doc here + +use stdx::format_to; +use syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; + +pub(crate) trait ShortLabel { + fn short_label(&self) -> Option; +} + +impl ShortLabel for ast::Fn { + fn short_label(&self) -> Option { + Some(crate::display::function_declaration(self)) + } +} + +impl ShortLabel for ast::Struct { + fn short_label(&self) -> Option { + short_label_from_node(self, "struct ") + } +} + +impl ShortLabel for ast::Union { + fn short_label(&self) -> Option { + short_label_from_node(self, "union ") + } +} + +impl ShortLabel for ast::Enum { + fn short_label(&self) -> Option { + short_label_from_node(self, "enum ") + } +} + +impl ShortLabel for ast::Trait { + fn short_label(&self) -> Option { + if self.unsafe_token().is_some() { + short_label_from_node(self, "unsafe trait ") + } else { + short_label_from_node(self, "trait ") + } + } +} + +impl ShortLabel for ast::Module { + fn short_label(&self) -> Option { + short_label_from_node(self, "mod ") + } +} + +impl ShortLabel for ast::SourceFile { + fn short_label(&self) -> Option { + None + } +} + +impl ShortLabel for ast::TypeAlias { + fn short_label(&self) -> Option { + short_label_from_node(self, "type ") + } +} + +impl ShortLabel for ast::Const { + fn short_label(&self) -> Option { + let mut new_buf = short_label_from_ty(self, self.ty(), "const ")?; + if let Some(expr) = self.body() { + format_to!(new_buf, " = {}", expr.syntax()); + } + Some(new_buf) + } +} + +impl ShortLabel for ast::Static { + fn short_label(&self) -> Option { + short_label_from_ty(self, self.ty(), "static ") + } +} + +impl ShortLabel for ast::RecordField { + fn short_label(&self) -> Option { + short_label_from_ty(self, self.ty(), "") + } +} + +impl ShortLabel for ast::Variant { + fn short_label(&self) -> Option { + Some(self.name()?.text().to_string()) + } +} + +fn short_label_from_ty(node: &T, ty: Option, prefix: &str) -> Option +where + T: NameOwner + VisibilityOwner, +{ + let mut buf = 
short_label_from_node(node, prefix)?; + + if let Some(type_ref) = ty { + format_to!(buf, ": {}", type_ref.syntax()); + } + + Some(buf) +} + +fn short_label_from_node(node: &T, label: &str) -> Option +where + T: NameOwner + VisibilityOwner, +{ + let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default(); + buf.push_str(label); + buf.push_str(node.name()?.text().as_str()); + Some(buf) +} diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs new file mode 100644 index 000000000..31455709d --- /dev/null +++ b/crates/ide/src/expand_macro.rs @@ -0,0 +1,283 @@ +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ + algo::{find_node_at_offset, SyntaxRewriter}, + ast, AstNode, NodeOrToken, SyntaxKind, + SyntaxKind::*, + SyntaxNode, WalkEvent, T, +}; + +use crate::FilePosition; + +pub struct ExpandedMacro { + pub name: String, + pub expansion: String, +} + +// Feature: Expand Macro Recursively +// +// Shows the full macro expansion of the macro at current cursor. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Expand macro recursively** +// |=== +pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let name_ref = find_node_at_offset::(file.syntax(), position.offset)?; + let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; + + let expanded = expand_macro_recur(&sema, &mac)?; + + // FIXME: + // macro expansion may lose all white space information + // But we hope someday we can use ra_fmt for that + let expansion = insert_whitespaces(expanded); + Some(ExpandedMacro { name: name_ref.text().to_string(), expansion }) +} + +fn expand_macro_recur( + sema: &Semantics, + macro_call: &ast::MacroCall, +) -> Option { + let mut expanded = sema.expand(macro_call)?; + + let children = expanded.descendants().filter_map(ast::MacroCall::cast); + let mut rewriter = SyntaxRewriter::default(); + + for child in children.into_iter() { + if let Some(new_node) = expand_macro_recur(sema, &child) { + // Replace the whole node if it is root + // `replace_descendants` will not replace the parent node + // but `SyntaxNode::descendants include itself + if expanded == *child.syntax() { + expanded = new_node; + } else { + rewriter.replace(child.syntax(), &new_node) + } + } + } + + let res = rewriter.rewrite(&expanded); + Some(res) +} + +// FIXME: It would also be cool to share logic here and in the mbe tests, +// which are pretty unreadable at the moment. 
+fn insert_whitespaces(syn: SyntaxNode) -> String { + let mut res = String::new(); + let mut token_iter = syn + .preorder_with_tokens() + .filter_map(|event| { + if let WalkEvent::Enter(NodeOrToken::Token(token)) = event { + Some(token) + } else { + None + } + }) + .peekable(); + + let mut indent = 0; + let mut last: Option = None; + + while let Some(token) = token_iter.next() { + let mut is_next = |f: fn(SyntaxKind) -> bool, default| -> bool { + token_iter.peek().map(|it| f(it.kind())).unwrap_or(default) + }; + let is_last = + |f: fn(SyntaxKind) -> bool, default| -> bool { last.map(f).unwrap_or(default) }; + + res += &match token.kind() { + k if is_text(k) && is_next(|it| !it.is_punct(), true) => token.text().to_string() + " ", + L_CURLY if is_next(|it| it != R_CURLY, true) => { + indent += 1; + let leading_space = if is_last(is_text, false) { " " } else { "" }; + format!("{}{{\n{}", leading_space, " ".repeat(indent)) + } + R_CURLY if is_last(|it| it != L_CURLY, true) => { + indent = indent.saturating_sub(1); + format!("\n{}}}", " ".repeat(indent)) + } + R_CURLY => format!("}}\n{}", " ".repeat(indent)), + T![;] => format!(";\n{}", " ".repeat(indent)), + T![->] => " -> ".to_string(), + T![=] => " = ".to_string(), + T![=>] => " => ".to_string(), + _ => token.text().to_string(), + }; + + last = Some(token.kind()); + } + + return res; + + fn is_text(k: SyntaxKind) -> bool { + k.is_keyword() || k.is_literal() || k == IDENT + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::mock_analysis::analysis_and_position; + + fn check(ra_fixture: &str, expect: Expect) { + let (analysis, pos) = analysis_and_position(ra_fixture); + let expansion = analysis.expand_macro(pos).unwrap().unwrap(); + let actual = format!("{}\n{}", expansion.name, expansion.expansion); + expect.assert_eq(&actual); + } + + #[test] + fn macro_expand_recursive_expansion() { + check( + r#" +macro_rules! bar { + () => { fn b() {} } +} +macro_rules! 
foo { + () => { bar!(); } +} +macro_rules! baz { + () => { foo!(); } +} +f<|>oo!(); +"#, + expect![[r#" + foo + fn b(){} + "#]], + ); + } + + #[test] + fn macro_expand_multiple_lines() { + check( + r#" +macro_rules! foo { + () => { + fn some_thing() -> u32 { + let a = 0; + a + 10 + } + } +} +f<|>oo!(); + "#, + expect![[r#" + foo + fn some_thing() -> u32 { + let a = 0; + a+10 + }"#]], + ); + } + + #[test] + fn macro_expand_match_ast() { + check( + r#" +macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + (match ($node:expr) { + $( ast::$ast:ident($it:ident) => $res:block, )* + _ => $catch_all:expr $(,)? + }) => {{ + $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* + { $catch_all } + }}; +} + +fn main() { + mat<|>ch_ast! { + match container { + ast::TraitDef(it) => {}, + ast::ImplDef(it) => {}, + _ => { continue }, + } + } +} +"#, + expect![[r#" + match_ast + { + if let Some(it) = ast::TraitDef::cast(container.clone()){} + else if let Some(it) = ast::ImplDef::cast(container.clone()){} + else { + { + continue + } + } + }"#]], + ); + } + + #[test] + fn macro_expand_match_ast_inside_let_statement() { + check( + r#" +macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + (match ($node:expr) {}) => {{}}; +} + +fn main() { + let p = f(|it| { + let res = mat<|>ch_ast! { match c {}}; + Some(res) + })?; +} +"#, + expect![[r#" + match_ast + {} + "#]], + ); + } + + #[test] + fn macro_expand_inner_macro_fail_to_expand() { + check( + r#" +macro_rules! bar { + (BAD) => {}; +} +macro_rules! foo { + () => {bar!()}; +} + +fn main() { + let res = fo<|>o!(); +} +"#, + expect![[r#" + foo + "#]], + ); + } + + #[test] + fn macro_expand_with_dollar_crate() { + check( + r#" +#[macro_export] +macro_rules! bar { + () => {0}; +} +macro_rules! 
foo { + () => {$crate::bar!()}; +} + +fn main() { + let res = fo<|>o!(); +} +"#, + expect![[r#" + foo + 0 "#]], + ); + } +} diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs new file mode 100644 index 000000000..34563a026 --- /dev/null +++ b/crates/ide/src/extend_selection.rs @@ -0,0 +1,654 @@ +use std::iter::successors; + +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ + algo::{self, find_covering_element, skip_trivia_token}, + ast::{self, AstNode, AstToken}, + Direction, NodeOrToken, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, T, +}; + +use crate::FileRange; + +// Feature: Extend Selection +// +// Extends the current selection to the encompassing syntactic construct +// (expression, statement, item, module, etc). It works with multiple cursors. +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+Shift+→] +// |=== +pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { + let sema = Semantics::new(db); + let src = sema.parse(frange.file_id); + try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range) +} + +fn try_extend_selection( + sema: &Semantics, + root: &SyntaxNode, + frange: FileRange, +) -> Option { + let range = frange.range; + + let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; + let list_kinds = [ + RECORD_PAT_FIELD_LIST, + MATCH_ARM_LIST, + RECORD_FIELD_LIST, + TUPLE_FIELD_LIST, + RECORD_EXPR_FIELD_LIST, + VARIANT_LIST, + USE_TREE_LIST, + GENERIC_PARAM_LIST, + GENERIC_ARG_LIST, + TYPE_BOUND_LIST, + PARAM_LIST, + ARG_LIST, + ARRAY_EXPR, + TUPLE_EXPR, + TUPLE_TYPE, + TUPLE_PAT, + WHERE_CLAUSE, + ]; + + if range.is_empty() { + let offset = range.start(); + let mut leaves = root.token_at_offset(offset); + if leaves.clone().all(|it| it.kind() == WHITESPACE) { + return Some(extend_ws(root, leaves.next()?, offset)); + } + let leaf_range = match leaves { + TokenAtOffset::None => 
return None, + TokenAtOffset::Single(l) => { + if string_kinds.contains(&l.kind()) { + extend_single_word_in_comment_or_string(&l, offset) + .unwrap_or_else(|| l.text_range()) + } else { + l.text_range() + } + } + TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(), + }; + return Some(leaf_range); + }; + let node = match find_covering_element(root, range) { + NodeOrToken::Token(token) => { + if token.text_range() != range { + return Some(token.text_range()); + } + if let Some(comment) = ast::Comment::cast(token.clone()) { + if let Some(range) = extend_comments(comment) { + return Some(range); + } + } + token.parent() + } + NodeOrToken::Node(node) => node, + }; + + // if we are in single token_tree, we maybe live in macro or attr + if node.kind() == TOKEN_TREE { + if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { + if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { + return Some(range); + } + } + } + + if node.text_range() != range { + return Some(node.text_range()); + } + + let node = shallowest_node(&node); + + if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { + if let Some(range) = extend_list_item(&node) { + return Some(range); + } + } + + node.parent().map(|it| it.text_range()) +} + +fn extend_tokens_from_range( + sema: &Semantics, + macro_call: ast::MacroCall, + original_range: TextRange, +) -> Option { + let src = find_covering_element(¯o_call.syntax(), original_range); + let (first_token, last_token) = match src { + NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?), + NodeOrToken::Token(it) => (it.clone(), it), + }; + + let mut first_token = skip_trivia_token(first_token, Direction::Next)?; + let mut last_token = skip_trivia_token(last_token, Direction::Prev)?; + + while !original_range.contains_range(first_token.text_range()) { + first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?; + } + while 
!original_range.contains_range(last_token.text_range()) { + last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?; + } + + // compute original mapped token range + let extended = { + let fst_expanded = sema.descend_into_macros(first_token.clone()); + let lst_expanded = sema.descend_into_macros(last_token.clone()); + let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?; + lca = shallowest_node(&lca); + if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) { + lca = lca.parent()?; + } + lca + }; + + // Compute parent node range + let validate = |token: &SyntaxToken| { + let expanded = sema.descend_into_macros(token.clone()); + algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended) + }; + + // Find the first and last text range under expanded parent + let first = successors(Some(first_token), |token| { + let token = token.prev_token()?; + skip_trivia_token(token, Direction::Prev) + }) + .take_while(validate) + .last()?; + + let last = successors(Some(last_token), |token| { + let token = token.next_token()?; + skip_trivia_token(token, Direction::Next) + }) + .take_while(validate) + .last()?; + + let range = first.text_range().cover(last.text_range()); + if range.contains_range(original_range) && original_range != range { + Some(range) + } else { + None + } +} + +/// Find the shallowest node with same range, which allows us to traverse siblings. 
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode { + node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap() +} + +fn extend_single_word_in_comment_or_string( + leaf: &SyntaxToken, + offset: TextSize, +) -> Option { + let text: &str = leaf.text(); + let cursor_position: u32 = (offset - leaf.text_range().start()).into(); + + let (before, after) = text.split_at(cursor_position as usize); + + fn non_word_char(c: char) -> bool { + !(c.is_alphanumeric() || c == '_') + } + + let start_idx = before.rfind(non_word_char)? as u32; + let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; + + let from: TextSize = (start_idx + 1).into(); + let to: TextSize = (cursor_position + end_idx).into(); + + let range = TextRange::new(from, to); + if range.is_empty() { + None + } else { + Some(range + leaf.text_range().start()) + } +} + +fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange { + let ws_text = ws.text(); + let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start(); + let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start(); + let ws_suffix = &ws_text.as_str()[suffix]; + let ws_prefix = &ws_text.as_str()[prefix]; + if ws_text.contains('\n') && !ws_suffix.contains('\n') { + if let Some(node) = ws.next_sibling_or_token() { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.text_range().start() + TextSize::from((idx + 1) as u32), + None => node.text_range().start(), + }; + let end = if root.text().char_at(node.text_range().end()) == Some('\n') { + node.text_range().end() + TextSize::of('\n') + } else { + node.text_range().end() + }; + return TextRange::new(start, end); + } + } + ws.text_range() +} + +fn pick_best(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken { + return if priority(&r) > priority(&l) { r } else { l }; + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + WHITESPACE => 0, + IDENT | T![self] | T![super] | 
T![crate] | LIFETIME => 2, + _ => 1, + } + } +} + +/// Extend list item selection to include nearby delimiter and whitespace. +fn extend_list_item(node: &SyntaxNode) -> Option { + fn is_single_line_ws(node: &SyntaxToken) -> bool { + node.kind() == WHITESPACE && !node.text().contains('\n') + } + + fn nearby_delimiter( + delimiter_kind: SyntaxKind, + node: &SyntaxNode, + dir: Direction, + ) -> Option { + node.siblings_with_tokens(dir) + .skip(1) + .skip_while(|node| match node { + NodeOrToken::Node(_) => false, + NodeOrToken::Token(it) => is_single_line_ws(it), + }) + .next() + .and_then(|it| it.into_token()) + .filter(|node| node.kind() == delimiter_kind) + } + + let delimiter = match node.kind() { + TYPE_BOUND => T![+], + _ => T![,], + }; + + if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) { + // Include any following whitespace when delimiter is after list item. + let final_node = delimiter_node + .next_sibling_or_token() + .and_then(|it| it.into_token()) + .filter(|node| is_single_line_ws(node)) + .unwrap_or(delimiter_node); + + return Some(TextRange::new(node.text_range().start(), final_node.text_range().end())); + } + if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) { + return Some(TextRange::new(delimiter_node.text_range().start(), node.text_range().end())); + } + + None +} + +fn extend_comments(comment: ast::Comment) -> Option { + let prev = adj_comments(&comment, Direction::Prev); + let next = adj_comments(&comment, Direction::Next); + if prev != next { + Some(TextRange::new(prev.syntax().text_range().start(), next.syntax().text_range().end())) + } else { + None + } +} + +fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { + let mut res = comment.clone(); + for element in comment.syntax().siblings_with_tokens(dir) { + let token = match element.as_token() { + None => break, + Some(token) => token, + }; + if let Some(c) = ast::Comment::cast(token.clone()) { + res = c + } 
else if token.kind() != WHITESPACE || token.text().contains("\n\n") { + break; + } + } + res +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::analysis_and_position; + + use super::*; + + fn do_check(before: &str, afters: &[&str]) { + let (analysis, position) = analysis_and_position(&before); + let before = analysis.file_text(position.file_id).unwrap(); + let range = TextRange::empty(position.offset); + let mut frange = FileRange { file_id: position.file_id, range }; + + for &after in afters { + frange.range = analysis.extend_selection(frange).unwrap(); + let actual = &before[frange.range]; + assert_eq!(after, actual); + } + } + + #[test] + fn test_extend_selection_arith() { + do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); + } + + #[test] + fn test_extend_selection_list() { + do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]); + do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]); + do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,", "(x: i32,y: i32)"]); + do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]); + do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", "y: i32, "]); + do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]); + + do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", "33 ,", "[ 22 , 33 ,]"]); + + do_check(r#"fn main() { (1, 2<|>) }"#, &["2", ", 2", "(1, 2)"]); + + do_check( + r#" +const FOO: [usize; 2] = [ + 22, + <|>33, +]"#, + &["33", "33,"], + ); + + do_check( + r#" +const FOO: [usize; 2] = [ + 22 + , 33<|>, +]"#, + &["33", "33,"], + ); + } + + #[test] + fn test_extend_selection_start_of_the_line() { + do_check( + r#" +impl S { +<|> fn foo() { + + } +}"#, + &[" fn foo() {\n\n }\n"], + ); + } + + #[test] + fn test_extend_selection_doc_comments() { + do_check( + 
r#" +struct A; + +/// bla +/// bla +struct B { + <|> +} + "#, + &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"], + ) + } + + #[test] + fn test_extend_selection_comments() { + do_check( + r#" +fn bar(){} + +// fn foo() { +// 1 + <|>1 +// } + +// fn foo(){} + "#, + &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], + ); + + do_check( + r#" +// #[derive(Debug, Clone, Copy, PartialEq, Eq)] +// pub enum Direction { +// <|> Next, +// Prev +// } +"#, + &[ + "// Next,", + "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }", + ], + ); + + do_check( + r#" +/* +foo +_bar1<|>*/ +"#, + &["_bar1", "/*\nfoo\n_bar1*/"], + ); + + do_check(r#"//!<|>foo_2 bar"#, &["foo_2", "//!foo_2 bar"]); + + do_check(r#"/<|>/foo bar"#, &["//foo bar"]); + } + + #[test] + fn test_extend_selection_prefer_idents() { + do_check( + r#" +fn main() { foo<|>+bar;} +"#, + &["foo", "foo+bar"], + ); + do_check( + r#" +fn main() { foo+<|>bar;} +"#, + &["bar", "foo+bar"], + ); + } + + #[test] + fn test_extend_selection_prefer_lifetimes() { + do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); + do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); + } + + #[test] + fn test_extend_selection_select_first_word() { + do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); + do_check( + r#" +impl S { +fn foo() { +// hel<|>lo world +} +} +"#, + &["hello", "// hello world"], + ); + } + + #[test] + fn test_extend_selection_string() { + do_check( + r#" +fn bar(){} + +" fn f<|>oo() {" +"#, + &["foo", "\" fn foo() {\""], + ); + } + + #[test] + fn test_extend_trait_bounds_list_in_where_clause() { + do_check( + r#" +fn foo() + where + R: req::Request + 'static, + R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static, + R::Result: Serialize + 'static, +"#, + &[ + "DeserializeOwned", + "DeserializeOwned + ", + "DeserializeOwned + panic::UnwindSafe + 'static", + "R::Params: DeserializeOwned + panic::UnwindSafe + 'static", + "R::Params: 
DeserializeOwned + panic::UnwindSafe + 'static,", + ], + ); + do_check(r#"fn foo() where T: <|>Copy"#, &["Copy"]); + do_check(r#"fn foo() where T: <|>Copy + Display"#, &["Copy", "Copy + "]); + do_check(r#"fn foo() where T: <|>Copy +Display"#, &["Copy", "Copy +"]); + do_check(r#"fn foo() where T: <|>Copy+Display"#, &["Copy", "Copy+"]); + do_check(r#"fn foo() where T: Copy + <|>Display"#, &["Display", "+ Display"]); + do_check(r#"fn foo() where T: Copy + <|>Display + Sync"#, &["Display", "Display + "]); + do_check(r#"fn foo() where T: Copy +<|>Display"#, &["Display", "+Display"]); + } + + #[test] + fn test_extend_trait_bounds_list_inline() { + do_check(r#"fn fooCopy>() {}"#, &["Copy"]); + do_check(r#"fn fooCopy + Display>() {}"#, &["Copy", "Copy + "]); + do_check(r#"fn fooCopy +Display>() {}"#, &["Copy", "Copy +"]); + do_check(r#"fn fooCopy+Display>() {}"#, &["Copy", "Copy+"]); + do_check(r#"fn fooDisplay>() {}"#, &["Display", "+ Display"]); + do_check(r#"fn fooDisplay + Sync>() {}"#, &["Display", "Display + "]); + do_check(r#"fn fooDisplay>() {}"#, &["Display", "+Display"]); + do_check( + r#"fn foo + Display, U: Copy>() {}"#, + &[ + "Copy", + "Copy + ", + "Copy + Display", + "T: Copy + Display", + "T: Copy + Display, ", + "", + ], + ); + } + + #[test] + fn test_extend_selection_on_tuple_in_type() { + do_check( + r#"fn main() { let _: (krate, <|>_crate_def_map, module_id) = (); }"#, + &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], + ); + // white space variations + do_check( + r#"fn main() { let _: (krate,<|>_crate_def_map,module_id) = (); }"#, + &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], + ); + do_check( + r#" +fn main() { let _: ( + krate, + _crate<|>_def_map, + module_id +) = (); }"#, + &[ + "_crate_def_map", + "_crate_def_map,", + "(\n krate,\n _crate_def_map,\n module_id\n)", + ], + ); + } + + #[test] + fn test_extend_selection_on_tuple_in_rvalue() { + do_check( + r#"fn main() { let var = (krate, 
_crate_def_map<|>, module_id); }"#, + &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], + ); + // white space variations + do_check( + r#"fn main() { let var = (krate,_crate<|>_def_map,module_id); }"#, + &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], + ); + do_check( + r#" +fn main() { let var = ( + krate, + _crate_def_map<|>, + module_id +); }"#, + &[ + "_crate_def_map", + "_crate_def_map,", + "(\n krate,\n _crate_def_map,\n module_id\n)", + ], + ); + } + + #[test] + fn test_extend_selection_on_tuple_pat() { + do_check( + r#"fn main() { let (krate, _crate_def_map<|>, module_id) = var; }"#, + &["_crate_def_map", "_crate_def_map, ", "(krate, _crate_def_map, module_id)"], + ); + // white space variations + do_check( + r#"fn main() { let (krate,_crate<|>_def_map,module_id) = var; }"#, + &["_crate_def_map", "_crate_def_map,", "(krate,_crate_def_map,module_id)"], + ); + do_check( + r#" +fn main() { let ( + krate, + _crate_def_map<|>, + module_id +) = var; }"#, + &[ + "_crate_def_map", + "_crate_def_map,", + "(\n krate,\n _crate_def_map,\n module_id\n)", + ], + ); + } + + #[test] + fn extend_selection_inside_macros() { + do_check( + r#"macro_rules! foo { ($item:item) => {$item} } + foo!{fn hello(na<|>me:usize){}}"#, + &[ + "name", + "name:usize", + "(name:usize)", + "fn hello(name:usize){}", + "{fn hello(name:usize){}}", + "foo!{fn hello(name:usize){}}", + ], + ); + } + + #[test] + fn extend_selection_inside_recur_macros() { + do_check( + r#" macro_rules! foo2 { ($item:item) => {$item} } + macro_rules! 
foo { ($item:item) => {foo2!($item);} } + foo!{fn hello(na<|>me:usize){}}"#, + &[ + "name", + "name:usize", + "(name:usize)", + "fn hello(name:usize){}", + "{fn hello(name:usize){}}", + "foo!{fn hello(name:usize){}}", + ], + ); + } +} diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs new file mode 100644 index 000000000..c90247ba6 --- /dev/null +++ b/crates/ide/src/file_structure.rs @@ -0,0 +1,431 @@ +use syntax::{ + ast::{self, AttrsOwner, GenericParamsOwner, NameOwner}, + match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, WalkEvent, +}; + +#[derive(Debug, Clone)] +pub struct StructureNode { + pub parent: Option, + pub label: String, + pub navigation_range: TextRange, + pub node_range: TextRange, + pub kind: SyntaxKind, + pub detail: Option, + pub deprecated: bool, +} + +// Feature: File Structure +// +// Provides a tree of the symbols defined in the file. Can be used to +// +// * fuzzy search symbol in a file (super useful) +// * draw breadcrumbs to describe the context around the cursor +// * draw outline of the file +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+Shift+O] +// |=== +pub fn file_structure(file: &SourceFile) -> Vec { + let mut res = Vec::new(); + let mut stack = Vec::new(); + + for event in file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = structure_node(&node) { + symbol.parent = stack.last().copied(); + stack.push(res.len()); + res.push(symbol); + } + } + WalkEvent::Leave(node) => { + if structure_node(&node).is_some() { + stack.pop().unwrap(); + } + } + } + } + res +} + +fn structure_node(node: &SyntaxNode) -> Option { + fn decl(node: N) -> Option { + decl_with_detail(&node, None) + } + + fn decl_with_type_ref( + node: &N, + type_ref: Option, + ) -> Option { + let detail = type_ref.map(|type_ref| { + let mut detail = String::new(); + collapse_ws(type_ref.syntax(), &mut detail); + detail + }); + decl_with_detail(node, detail) + } 
+ + fn decl_with_detail( + node: &N, + detail: Option, + ) -> Option { + let name = node.name()?; + + Some(StructureNode { + parent: None, + label: name.text().to_string(), + navigation_range: name.syntax().text_range(), + node_range: node.syntax().text_range(), + kind: node.syntax().kind(), + detail, + deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"), + }) + } + + fn collapse_ws(node: &SyntaxNode, output: &mut String) { + let mut can_insert_ws = false; + node.text().for_each_chunk(|chunk| { + for line in chunk.lines() { + let line = line.trim(); + if line.is_empty() { + if can_insert_ws { + output.push(' '); + can_insert_ws = false; + } + } else { + output.push_str(line); + can_insert_ws = true; + } + } + }) + } + + match_ast! { + match node { + ast::Fn(it) => { + let mut detail = String::from("fn"); + if let Some(type_param_list) = it.generic_param_list() { + collapse_ws(type_param_list.syntax(), &mut detail); + } + if let Some(param_list) = it.param_list() { + collapse_ws(param_list.syntax(), &mut detail); + } + if let Some(ret_type) = it.ret_type() { + detail.push_str(" "); + collapse_ws(ret_type.syntax(), &mut detail); + } + + decl_with_detail(&it, Some(detail)) + }, + ast::Struct(it) => decl(it), + ast::Union(it) => decl(it), + ast::Enum(it) => decl(it), + ast::Variant(it) => decl(it), + ast::Trait(it) => decl(it), + ast::Module(it) => decl(it), + ast::TypeAlias(it) => decl_with_type_ref(&it, it.ty()), + ast::RecordField(it) => decl_with_type_ref(&it, it.ty()), + ast::Const(it) => decl_with_type_ref(&it, it.ty()), + ast::Static(it) => decl_with_type_ref(&it, it.ty()), + ast::Impl(it) => { + let target_type = it.self_ty()?; + let target_trait = it.trait_(); + let label = match target_trait { + None => format!("impl {}", target_type.syntax().text()), + Some(t) => { + format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),) + } + }; + + let node = StructureNode { + parent: None, + label, + navigation_range: 
target_type.syntax().text_range(), + node_range: it.syntax().text_range(), + kind: it.syntax().kind(), + detail: None, + deprecated: false, + }; + Some(node) + }, + ast::MacroCall(it) => { + match it.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) { + Some(path_segment) if path_segment.text() == "macro_rules" + => decl(it), + _ => None, + } + }, + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use super::*; + + fn check(ra_fixture: &str, expect: Expect) { + let file = SourceFile::parse(ra_fixture).ok().unwrap(); + let structure = file_structure(&file); + expect.assert_debug_eq(&structure) + } + + #[test] + fn test_file_structure() { + check( + r#" +struct Foo { + x: i32 +} + +mod m { + fn bar1() {} + fn bar2(t: T) -> T {} + fn bar3(a: A, + b: B) -> Vec< + u32 + > {} +} + +enum E { X, Y(i32) } +type T = (); +static S: i32 = 92; +const C: i32 = 92; + +impl E {} + +impl fmt::Debug for E {} + +macro_rules! mc { + () => {} +} + +#[macro_export] +macro_rules! mcexp { + () => {} +} + +/// Doc comment +macro_rules! 
mcexp { + () => {} +} + +#[deprecated] +fn obsolete() {} + +#[deprecated(note = "for awhile")] +fn very_obsolete() {} +"#, + expect![[r#" + [ + StructureNode { + parent: None, + label: "Foo", + navigation_range: 8..11, + node_range: 1..26, + kind: STRUCT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 0, + ), + label: "x", + navigation_range: 18..19, + node_range: 18..24, + kind: RECORD_FIELD, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "m", + navigation_range: 32..33, + node_range: 28..158, + kind: MODULE, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar1", + navigation_range: 43..47, + node_range: 40..52, + kind: FN, + detail: Some( + "fn()", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar2", + navigation_range: 60..64, + node_range: 57..81, + kind: FN, + detail: Some( + "fn(t: T) -> T", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar3", + navigation_range: 89..93, + node_range: 86..156, + kind: FN, + detail: Some( + "fn(a: A, b: B) -> Vec< u32 >", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "E", + navigation_range: 165..166, + node_range: 160..180, + kind: ENUM, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "X", + navigation_range: 169..170, + node_range: 169..170, + kind: VARIANT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "Y", + navigation_range: 172..173, + node_range: 172..178, + kind: VARIANT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "T", + navigation_range: 186..187, + node_range: 181..193, + kind: TYPE_ALIAS, + detail: Some( + "()", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "S", + navigation_range: 201..202, + node_range: 194..213, + 
kind: STATIC, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "C", + navigation_range: 220..221, + node_range: 214..232, + kind: CONST, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "impl E", + navigation_range: 239..240, + node_range: 234..243, + kind: IMPL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "impl fmt::Debug for E", + navigation_range: 265..266, + node_range: 245..269, + kind: IMPL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mc", + navigation_range: 284..286, + node_range: 271..303, + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mcexp", + navigation_range: 334..339, + node_range: 305..356, + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mcexp", + navigation_range: 387..392, + node_range: 358..409, + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "obsolete", + navigation_range: 428..436, + node_range: 411..441, + kind: FN, + detail: Some( + "fn()", + ), + deprecated: true, + }, + StructureNode { + parent: None, + label: "very_obsolete", + navigation_range: 481..494, + node_range: 443..499, + kind: FN, + detail: Some( + "fn()", + ), + deprecated: true, + }, + ] + "#]], + ); + } +} diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs new file mode 100644 index 000000000..7523aec55 --- /dev/null +++ b/crates/ide/src/folding_ranges.rs @@ -0,0 +1,422 @@ +//! 
FIXME: write short doc here + +use rustc_hash::FxHashSet; + +use syntax::{ + ast::{self, AstNode, AstToken, VisibilityOwner}, + Direction, NodeOrToken, SourceFile, + SyntaxKind::{self, *}, + SyntaxNode, TextRange, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum FoldKind { + Comment, + Imports, + Mods, + Block, + ArgList, +} + +#[derive(Debug)] +pub struct Fold { + pub range: TextRange, + pub kind: FoldKind, +} + +pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { + let mut res = vec![]; + let mut visited_comments = FxHashSet::default(); + let mut visited_imports = FxHashSet::default(); + let mut visited_mods = FxHashSet::default(); + + for element in file.syntax().descendants_with_tokens() { + // Fold items that span multiple lines + if let Some(kind) = fold_kind(element.kind()) { + let is_multiline = match &element { + NodeOrToken::Node(node) => node.text().contains_char('\n'), + NodeOrToken::Token(token) => token.text().contains('\n'), + }; + if is_multiline { + res.push(Fold { range: element.text_range(), kind }); + continue; + } + } + + match element { + NodeOrToken::Token(token) => { + // Fold groups of comments + if let Some(comment) = ast::Comment::cast(token) { + if !visited_comments.contains(&comment) { + if let Some(range) = + contiguous_range_for_comment(comment, &mut visited_comments) + { + res.push(Fold { range, kind: FoldKind::Comment }) + } + } + } + } + NodeOrToken::Node(node) => { + // Fold groups of imports + if node.kind() == USE && !visited_imports.contains(&node) { + if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { + res.push(Fold { range, kind: FoldKind::Imports }) + } + } + + // Fold groups of mods + if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) + { + if let Some(range) = + contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods) + { + res.push(Fold { range, kind: FoldKind::Mods }) + } + } + } + } + } + + res +} + +fn fold_kind(kind: SyntaxKind) -> 
Option { + match kind { + COMMENT => Some(FoldKind::Comment), + USE => Some(FoldKind::Imports), + ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList), + ASSOC_ITEM_LIST + | RECORD_FIELD_LIST + | RECORD_PAT_FIELD_LIST + | RECORD_EXPR_FIELD_LIST + | ITEM_LIST + | EXTERN_ITEM_LIST + | USE_TREE_LIST + | BLOCK_EXPR + | MATCH_ARM_LIST + | VARIANT_LIST + | TOKEN_TREE => Some(FoldKind::Block), + _ => None, + } +} + +fn has_visibility(node: &SyntaxNode) -> bool { + ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some() +} + +fn contiguous_range_for_group( + first: &SyntaxNode, + visited: &mut FxHashSet, +) -> Option { + contiguous_range_for_group_unless(first, |_| false, visited) +} + +fn contiguous_range_for_group_unless( + first: &SyntaxNode, + unless: impl Fn(&SyntaxNode) -> bool, + visited: &mut FxHashSet, +) -> Option { + visited.insert(first.clone()); + + let mut last = first.clone(); + for element in first.siblings_with_tokens(Direction::Next) { + let node = match element { + NodeOrToken::Token(token) => { + if let Some(ws) = ast::Whitespace::cast(token) { + if !ws.spans_multiple_lines() { + // Ignore whitespace without blank lines + continue; + } + } + // There is a blank line or another token, which means that the + // group ends here + break; + } + NodeOrToken::Node(node) => node, + }; + + // Stop if we find a node that doesn't belong to the group + if node.kind() != first.kind() || unless(&node) { + break; + } + + visited.insert(node.clone()); + last = node; + } + + if first != &last { + Some(TextRange::new(first.text_range().start(), last.text_range().end())) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +fn contiguous_range_for_comment( + first: ast::Comment, + visited: &mut FxHashSet, +) -> Option { + visited.insert(first.clone()); + + // Only fold comments of the same flavor + let group_kind = first.kind(); + if !group_kind.shape.is_line() { + return None; + } + + let mut last = 
first.clone(); + for element in first.syntax().siblings_with_tokens(Direction::Next) { + match element { + NodeOrToken::Token(token) => { + if let Some(ws) = ast::Whitespace::cast(token.clone()) { + if !ws.spans_multiple_lines() { + // Ignore whitespace without blank lines + continue; + } + } + if let Some(c) = ast::Comment::cast(token) { + if c.kind() == group_kind { + visited.insert(c.clone()); + last = c; + continue; + } + } + // The comment group ends because either: + // * An element of a different kind was reached + // * A comment of a different flavor was reached + break; + } + NodeOrToken::Node(_) => break, + }; + } + + if first != last { + Some(TextRange::new(first.syntax().text_range().start(), last.syntax().text_range().end())) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +#[cfg(test)] +mod tests { + use test_utils::extract_tags; + + use super::*; + + fn check(ra_fixture: &str) { + let (ranges, text) = extract_tags(ra_fixture, "fold"); + + let parse = SourceFile::parse(&text); + let folds = folding_ranges(&parse.tree()); + assert_eq!( + folds.len(), + ranges.len(), + "The amount of folds is different than the expected amount" + ); + + for (fold, (range, attr)) in folds.iter().zip(ranges.into_iter()) { + assert_eq!(fold.range.start(), range.start()); + assert_eq!(fold.range.end(), range.end()); + + let kind = match fold.kind { + FoldKind::Comment => "comment", + FoldKind::Imports => "imports", + FoldKind::Mods => "mods", + FoldKind::Block => "block", + FoldKind::ArgList => "arglist", + }; + assert_eq!(kind, &attr.unwrap()); + } + } + + #[test] + fn test_fold_comments() { + check( + r#" +// Hello +// this is a multiline +// comment +// + +// But this is not + +fn main() { + // We should + // also + // fold + // this one. + //! But this one is different + //! 
because it has another flavor + /* As does this + multiline comment */ +}"#, + ); + } + + #[test] + fn test_fold_imports() { + check( + r#" +use std::{ + str, + vec, + io as iop +}; + +fn main() { +}"#, + ); + } + + #[test] + fn test_fold_mods() { + check( + r#" + +pub mod foo; +mod after_pub; +mod after_pub_next; + +mod before_pub; +mod before_pub_next; +pub mod bar; + +mod not_folding_single; +pub mod foobar; +pub not_folding_single_next; + +#[cfg(test)] +mod with_attribute; +mod with_attribute_next; + +fn main() { +}"#, + ); + } + + #[test] + fn test_fold_import_groups() { + check( + r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::HashMap; +// Some random comment +use std::collections::VecDeque; + +fn main() { +}"#, + ); + } + + #[test] + fn test_fold_import_and_groups() { + check( + r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::{ + HashMap, + VecDeque, +}; +// Some random comment + +fn main() { +}"#, + ); + } + + #[test] + fn test_folds_structs() { + check( + r#" +struct Foo { +} +"#, + ); + } + + #[test] + fn test_folds_traits() { + check( + r#" +trait Foo { +} +"#, + ); + } + + #[test] + fn test_folds_macros() { + check( + r#" +macro_rules! 
foo { + ($($tt:tt)*) => { $($tt)* } +} +"#, + ); + } + + #[test] + fn test_fold_match_arms() { + check( + r#" +fn main() { + match 0 { + 0 => 0, + _ => 1, + } +} +"#, + ); + } + + #[test] + fn fold_big_calls() { + check( + r#" +fn main() { + frobnicate( + 1, + 2, + 3, + ) +} +"#, + ) + } + + #[test] + fn fold_record_literals() { + check( + r#" +const _: S = S { + +}; +"#, + ) + } + + #[test] + fn fold_multiline_params() { + check( + r#" +fn foo( + x: i32, + y: String, +) {} +"#, + ) + } +} diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs new file mode 100644 index 000000000..15e9b7fad --- /dev/null +++ b/crates/ide/src/goto_definition.rs @@ -0,0 +1,989 @@ +use hir::Semantics; +use ide_db::{ + defs::{classify_name, classify_name_ref}, + symbol_index, RootDatabase, +}; +use syntax::{ + ast::{self}, + match_ast, AstNode, + SyntaxKind::*, + SyntaxToken, TokenAtOffset, T, +}; + +use crate::{ + display::{ToNav, TryToNav}, + FilePosition, NavigationTarget, RangeInfo, +}; + +// Feature: Go to Definition +// +// Navigates to the definition of an identifier. +// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[F12] +// |=== +pub(crate) fn goto_definition( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id).syntax().clone(); + let original_token = pick_best(file.token_at_offset(position.offset))?; + let token = sema.descend_into_macros(original_token.clone()); + let parent = token.parent(); + + let nav_targets = match_ast! 
{ + match parent { + ast::NameRef(name_ref) => { + reference_definition(&sema, &name_ref).to_vec() + }, + ast::Name(name) => { + let def = classify_name(&sema, &name)?.definition(sema.db); + let nav = def.try_to_nav(sema.db)?; + vec![nav] + }, + _ => return None, + } + }; + + Some(RangeInfo::new(original_token.text_range(), nav_targets)) +} + +fn pick_best(tokens: TokenAtOffset) -> Option { + return tokens.max_by_key(priority); + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + IDENT | INT_NUMBER | T![self] => 2, + kind if kind.is_trivia() => 0, + _ => 1, + } + } +} + +#[derive(Debug)] +pub(crate) enum ReferenceResult { + Exact(NavigationTarget), + Approximate(Vec), +} + +impl ReferenceResult { + fn to_vec(self) -> Vec { + match self { + ReferenceResult::Exact(target) => vec![target], + ReferenceResult::Approximate(vec) => vec, + } + } +} + +pub(crate) fn reference_definition( + sema: &Semantics, + name_ref: &ast::NameRef, +) -> ReferenceResult { + let name_kind = classify_name_ref(sema, name_ref); + if let Some(def) = name_kind { + let def = def.definition(sema.db); + return match def.try_to_nav(sema.db) { + Some(nav) => ReferenceResult::Exact(nav), + None => ReferenceResult::Approximate(Vec::new()), + }; + } + + // Fallback index based approach: + let navs = symbol_index::index_resolve(sema.db, name_ref) + .into_iter() + .map(|s| s.to_nav(sema.db)) + .collect(); + ReferenceResult::Approximate(navs) +} + +#[cfg(test)] +mod tests { + use base_db::FileRange; + use syntax::{TextRange, TextSize}; + + use crate::mock_analysis::MockAnalysis; + + fn check(ra_fixture: &str) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + let (mut expected, data) = mock.annotation(); + let analysis = mock.analysis(); + match data.as_str() { + "" => (), + "file" => { + expected.range = + TextRange::up_to(TextSize::of(&*analysis.file_text(expected.file_id).unwrap())) + } + data => panic!("bad data: {}", data), + } + + let mut navs = + 
analysis.goto_definition(position).unwrap().expect("no definition found").info; + if navs.len() == 0 { + panic!("unresolved reference") + } + assert_eq!(navs.len(), 1); + + let nav = navs.pop().unwrap(); + assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }); + } + + #[test] + fn goto_def_for_extern_crate() { + check( + r#" + //- /main.rs + extern crate std<|>; + //- /std/lib.rs + // empty + //^ file + "#, + ) + } + + #[test] + fn goto_def_for_renamed_extern_crate() { + check( + r#" + //- /main.rs + extern crate std as abc<|>; + //- /std/lib.rs + // empty + //^ file + "#, + ) + } + + #[test] + fn goto_def_in_items() { + check( + r#" +struct Foo; + //^^^ +enum E { X(Foo<|>) } +"#, + ); + } + + #[test] + fn goto_def_at_start_of_item() { + check( + r#" +struct Foo; + //^^^ +enum E { X(<|>Foo) } +"#, + ); + } + + #[test] + fn goto_definition_resolves_correct_name() { + check( + r#" +//- /lib.rs +use a::Foo; +mod a; +mod b; +enum E { X(Foo<|>) } + +//- /a.rs +struct Foo; + //^^^ +//- /b.rs +struct Foo; +"#, + ); + } + + #[test] + fn goto_def_for_module_declaration() { + check( + r#" +//- /lib.rs +mod <|>foo; + +//- /foo.rs +// empty +//^ file +"#, + ); + + check( + r#" +//- /lib.rs +mod <|>foo; + +//- /foo/mod.rs +// empty +//^ file +"#, + ); + } + + #[test] + fn goto_def_for_macros() { + check( + r#" +macro_rules! foo { () => { () } } + //^^^ +fn bar() { + <|>foo!(); +} +"#, + ); + } + + #[test] + fn goto_def_for_macros_from_other_crates() { + check( + r#" +//- /lib.rs +use foo::foo; +fn bar() { + <|>foo!(); +} + +//- /foo/lib.rs +#[macro_export] +macro_rules! foo { () => { () } } + //^^^ +"#, + ); + } + + #[test] + fn goto_def_for_macros_in_use_tree() { + check( + r#" +//- /lib.rs +use foo::foo<|>; + +//- /foo/lib.rs +#[macro_export] +macro_rules! foo { () => { () } } + //^^^ +"#, + ); + } + + #[test] + fn goto_def_for_macro_defined_fn_with_arg() { + check( + r#" +//- /lib.rs +macro_rules! 
define_fn { + ($name:ident) => (fn $name() {}) +} + +define_fn!(foo); + //^^^ + +fn bar() { + <|>foo(); +} +"#, + ); + } + + #[test] + fn goto_def_for_macro_defined_fn_no_arg() { + check( + r#" +//- /lib.rs +macro_rules! define_fn { + () => (fn foo() {}) +} + + define_fn!(); +//^^^^^^^^^^^^^ + +fn bar() { + <|>foo(); +} +"#, + ); + } + + #[test] + fn goto_definition_works_for_macro_inside_pattern() { + check( + r#" +//- /lib.rs +macro_rules! foo {() => {0}} + //^^^ + +fn bar() { + match (0,1) { + (<|>foo!(), _) => {} + } +} +"#, + ); + } + + #[test] + fn goto_definition_works_for_macro_inside_match_arm_lhs() { + check( + r#" +//- /lib.rs +macro_rules! foo {() => {0}} + //^^^ +fn bar() { + match 0 { + <|>foo!() => {} + } +} +"#, + ); + } + + #[test] + fn goto_def_for_use_alias() { + check( + r#" +//- /lib.rs +use foo as bar<|>; + +//- /foo/lib.rs +// empty +//^ file +"#, + ); + } + + #[test] + fn goto_def_for_use_alias_foo_macro() { + check( + r#" +//- /lib.rs +use foo::foo as bar<|>; + +//- /foo/lib.rs +#[macro_export] +macro_rules! foo { () => { () } } + //^^^ +"#, + ); + } + + #[test] + fn goto_def_for_methods() { + check( + r#" +//- /lib.rs +struct Foo; +impl Foo { + fn frobnicate(&self) { } + //^^^^^^^^^^ +} + +fn bar(foo: &Foo) { + foo.frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_def_for_fields() { + check( + r#" +struct Foo { + spam: u32, +} //^^^^ + +fn bar(foo: &Foo) { + foo.spam<|>; +} +"#, + ); + } + + #[test] + fn goto_def_for_record_fields() { + check( + r#" +//- /lib.rs +struct Foo { + spam: u32, +} //^^^^ + +fn bar() -> Foo { + Foo { + spam<|>: 0, + } +} +"#, + ); + } + + #[test] + fn goto_def_for_record_pat_fields() { + check( + r#" +//- /lib.rs +struct Foo { + spam: u32, +} //^^^^ + +fn bar(foo: Foo) -> Foo { + let Foo { spam<|>: _, } = foo +} +"#, + ); + } + + #[test] + fn goto_def_for_record_fields_macros() { + check( + r" +macro_rules! 
m { () => { 92 };} +struct Foo { spam: u32 } + //^^^^ + +fn bar() -> Foo { + Foo { spam<|>: m!() } +} +", + ); + } + + #[test] + fn goto_for_tuple_fields() { + check( + r#" +struct Foo(u32); + //^^^ + +fn bar() { + let foo = Foo(0); + foo.<|>0; +} +"#, + ); + } + + #[test] + fn goto_def_for_ufcs_inherent_methods() { + check( + r#" +struct Foo; +impl Foo { + fn frobnicate() { } +} //^^^^^^^^^^ + +fn bar(foo: &Foo) { + Foo::frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_def_for_ufcs_trait_methods_through_traits() { + check( + r#" +trait Foo { + fn frobnicate(); +} //^^^^^^^^^^ + +fn bar() { + Foo::frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_def_for_ufcs_trait_methods_through_self() { + check( + r#" +struct Foo; +trait Trait { + fn frobnicate(); +} //^^^^^^^^^^ +impl Trait for Foo {} + +fn bar() { + Foo::frobnicate<|>(); +} +"#, + ); + } + + #[test] + fn goto_definition_on_self() { + check( + r#" +struct Foo; +impl Foo { + //^^^ + pub fn new() -> Self { + Self<|> {} + } +} +"#, + ); + check( + r#" +struct Foo; +impl Foo { + //^^^ + pub fn new() -> Self<|> { + Self {} + } +} +"#, + ); + + check( + r#" +enum Foo { A } +impl Foo { + //^^^ + pub fn new() -> Self<|> { + Foo::A + } +} +"#, + ); + + check( + r#" +enum Foo { A } +impl Foo { + //^^^ + pub fn thing(a: &Self<|>) { + } +} +"#, + ); + } + + #[test] + fn goto_definition_on_self_in_trait_impl() { + check( + r#" +struct Foo; +trait Make { + fn new() -> Self; +} +impl Make for Foo { + //^^^ + fn new() -> Self { + Self<|> {} + } +} +"#, + ); + + check( + r#" +struct Foo; +trait Make { + fn new() -> Self; +} +impl Make for Foo { + //^^^ + fn new() -> Self<|> { + Self {} + } +} +"#, + ); + } + + #[test] + fn goto_def_when_used_on_definition_name_itself() { + check( + r#" +struct Foo<|> { value: u32 } + //^^^ + "#, + ); + + check( + r#" +struct Foo { + field<|>: string, +} //^^^^^ +"#, + ); + + check( + r#" +fn foo_test<|>() { } + //^^^^^^^^ +"#, + ); + + check( + r#" +enum Foo<|> { Variant } + //^^^ 
+"#, + ); + + check( + r#" +enum Foo { + Variant1, + Variant2<|>, + //^^^^^^^^ + Variant3, +} +"#, + ); + + check( + r#" +static INNER<|>: &str = ""; + //^^^^^ +"#, + ); + + check( + r#" +const INNER<|>: &str = ""; + //^^^^^ +"#, + ); + + check( + r#" +type Thing<|> = Option<()>; + //^^^^^ +"#, + ); + + check( + r#" +trait Foo<|> { } + //^^^ +"#, + ); + + check( + r#" +mod bar<|> { } + //^^^ +"#, + ); + } + + #[test] + fn goto_from_macro() { + check( + r#" +macro_rules! id { + ($($tt:tt)*) => { $($tt)* } +} +fn foo() {} + //^^^ +id! { + fn bar() { + fo<|>o(); + } +} +mod confuse_index { fn foo(); } +"#, + ); + } + + #[test] + fn goto_through_format() { + check( + r#" +#[macro_export] +macro_rules! format { + ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) +} +#[rustc_builtin_macro] +#[macro_export] +macro_rules! format_args { + ($fmt:expr) => ({ /* compiler built-in */ }); + ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) +} +pub mod __export { + pub use crate::format_args; + fn foo() {} // for index confusion +} +fn foo() -> i8 {} + //^^^ +fn test() { + format!("{}", fo<|>o()) +} +"#, + ); + } + + #[test] + fn goto_for_type_param() { + check( + r#" +struct Foo { t: <|>T } + //^ +"#, + ); + } + + #[test] + fn goto_within_macro() { + check( + r#" +macro_rules! id { + ($($tt:tt)*) => ($($tt)*) +} + +fn foo() { + let x = 1; + //^ + id!({ + let y = <|>x; + let z = y; + }); +} +"#, + ); + + check( + r#" +macro_rules! id { + ($($tt:tt)*) => ($($tt)*) +} + +fn foo() { + let x = 1; + id!({ + let y = x; + //^ + let z = <|>y; + }); +} +"#, + ); + } + + #[test] + fn goto_def_in_local_fn() { + check( + r#" +fn main() { + fn foo() { + let x = 92; + //^ + <|>x; + } +} +"#, + ); + } + + #[test] + fn goto_def_in_local_macro() { + check( + r#" +fn bar() { + macro_rules! 
foo { () => { () } } + //^^^ + <|>foo!(); +} +"#, + ); + } + + #[test] + fn goto_def_for_field_init_shorthand() { + check( + r#" +struct Foo { x: i32 } +fn main() { + let x = 92; + //^ + Foo { x<|> }; +} +"#, + ) + } + + #[test] + fn goto_def_for_enum_variant_field() { + check( + r#" +enum Foo { + Bar { x: i32 } +} //^ +fn baz(foo: Foo) { + match foo { + Foo::Bar { x<|> } => x + }; +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_pattern_const() { + check( + r#" +enum Foo { Bar } + //^^^ +impl Foo { + fn baz(self) { + match self { Self::Bar<|> => {} } + } +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_pattern_record() { + check( + r#" +enum Foo { Bar { val: i32 } } + //^^^ +impl Foo { + fn baz(self) -> i32 { + match self { Self::Bar<|> { val } => {} } + } +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_expr_const() { + check( + r#" +enum Foo { Bar } + //^^^ +impl Foo { + fn baz(self) { Self::Bar<|>; } +} +"#, + ); + } + + #[test] + fn goto_def_for_enum_variant_self_expr_record() { + check( + r#" +enum Foo { Bar { val: i32 } } + //^^^ +impl Foo { + fn baz(self) { Self::Bar<|> {val: 4}; } +} +"#, + ); + } + + #[test] + fn goto_def_for_type_alias_generic_parameter() { + check( + r#" +type Alias = T<|>; + //^ +"#, + ) + } + + #[test] + fn goto_def_for_macro_container() { + check( + r#" +//- /lib.rs +foo::module<|>::mac!(); + +//- /foo/lib.rs +pub mod module { + //^^^^^^ + #[macro_export] + macro_rules! 
_mac { () => { () } } + pub use crate::_mac as mac; +} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_in_path() { + check( + r#" +trait Iterator { + type Item; + //^^^^ +} + +fn f() -> impl Iterator = u8> {} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_in_path_multiple() { + check( + r#" +trait Iterator { + type A; + //^ + type B; +} + +fn f() -> impl Iterator = u8, B = ()> {} +"#, + ); + check( + r#" +trait Iterator { + type A; + type B; + //^ +} + +fn f() -> impl Iterator = ()> {} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_ufcs() { + check( + r#" +trait Iterator { + type Item; + //^^^^ +} + +fn g() -> <() as Iterator = ()>>::Item {} +"#, + ); + } + + #[test] + fn goto_def_for_assoc_ty_ufcs_multiple() { + check( + r#" +trait Iterator { + type A; + //^ + type B; +} + +fn g() -> <() as Iterator = (), B = u8>>::B {} +"#, + ); + check( + r#" +trait Iterator { + type A; + type B; + //^ +} + +fn g() -> <() as Iterator = u8>>::A {} +"#, + ); + } +} diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs new file mode 100644 index 000000000..f503f4ec5 --- /dev/null +++ b/crates/ide/src/goto_implementation.rs @@ -0,0 +1,229 @@ +use hir::{Crate, ImplDef, Semantics}; +use ide_db::RootDatabase; +use syntax::{algo::find_node_at_offset, ast, AstNode}; + +use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; + +// Feature: Go to Implementation +// +// Navigates to the impl block of structs, enums or traits. Also implemented as a code lens. 
+// +// |=== +// | Editor | Shortcut +// +// | VS Code | kbd:[Ctrl+F12] +// |=== +pub(crate) fn goto_implementation( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); + let syntax = source_file.syntax().clone(); + + let krate = sema.to_module_def(position.file_id)?.krate(); + + if let Some(nominal_def) = find_node_at_offset::(&syntax, position.offset) { + return Some(RangeInfo::new( + nominal_def.syntax().text_range(), + impls_for_def(&sema, &nominal_def, krate)?, + )); + } else if let Some(trait_def) = find_node_at_offset::(&syntax, position.offset) { + return Some(RangeInfo::new( + trait_def.syntax().text_range(), + impls_for_trait(&sema, &trait_def, krate)?, + )); + } + + None +} + +fn impls_for_def( + sema: &Semantics, + node: &ast::AdtDef, + krate: Crate, +) -> Option> { + let ty = match node { + ast::AdtDef::Struct(def) => sema.to_def(def)?.ty(sema.db), + ast::AdtDef::Enum(def) => sema.to_def(def)?.ty(sema.db), + ast::AdtDef::Union(def) => sema.to_def(def)?.ty(sema.db), + }; + + let impls = ImplDef::all_in_crate(sema.db, krate); + + Some( + impls + .into_iter() + .filter(|impl_def| ty.is_equal_for_find_impls(&impl_def.target_ty(sema.db))) + .map(|imp| imp.to_nav(sema.db)) + .collect(), + ) +} + +fn impls_for_trait( + sema: &Semantics, + node: &ast::Trait, + krate: Crate, +) -> Option> { + let tr = sema.to_def(node)?; + + let impls = ImplDef::for_trait(sema.db, krate, tr); + + Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect()) +} + +#[cfg(test)] +mod tests { + use base_db::FileRange; + + use crate::mock_analysis::MockAnalysis; + + fn check(ra_fixture: &str) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + let annotations = mock.annotations(); + let analysis = mock.analysis(); + + let navs = analysis.goto_implementation(position).unwrap().unwrap().info; + + let key = |frange: &FileRange| (frange.file_id, 
frange.range.start()); + + let mut expected = annotations + .into_iter() + .map(|(range, data)| { + assert!(data.is_empty()); + range + }) + .collect::>(); + expected.sort_by_key(key); + + let mut actual = navs + .into_iter() + .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .collect::>(); + actual.sort_by_key(key); + + assert_eq!(expected, actual); + } + + #[test] + fn goto_implementation_works() { + check( + r#" +struct Foo<|>; +impl Foo {} + //^^^ +"#, + ); + } + + #[test] + fn goto_implementation_works_multiple_blocks() { + check( + r#" +struct Foo<|>; +impl Foo {} + //^^^ +impl Foo {} + //^^^ +"#, + ); + } + + #[test] + fn goto_implementation_works_multiple_mods() { + check( + r#" +struct Foo<|>; +mod a { + impl super::Foo {} + //^^^^^^^^^^ +} +mod b { + impl super::Foo {} + //^^^^^^^^^^ +} +"#, + ); + } + + #[test] + fn goto_implementation_works_multiple_files() { + check( + r#" +//- /lib.rs +struct Foo<|>; +mod a; +mod b; +//- /a.rs +impl crate::Foo {} + //^^^^^^^^^^ +//- /b.rs +impl crate::Foo {} + //^^^^^^^^^^ +"#, + ); + } + + #[test] + fn goto_implementation_for_trait() { + check( + r#" +trait T<|> {} +struct Foo; +impl T for Foo {} + //^^^ +"#, + ); + } + + #[test] + fn goto_implementation_for_trait_multiple_files() { + check( + r#" +//- /lib.rs +trait T<|> {}; +struct Foo; +mod a; +mod b; +//- /a.rs +impl crate::T for crate::Foo {} + //^^^^^^^^^^ +//- /b.rs +impl crate::T for crate::Foo {} + //^^^^^^^^^^ + "#, + ); + } + + #[test] + fn goto_implementation_all_impls() { + check( + r#" +//- /lib.rs +trait T {} +struct Foo<|>; +impl Foo {} + //^^^ +impl T for Foo {} + //^^^ +impl T for &Foo {} + //^^^^ +"#, + ); + } + + #[test] + fn goto_implementation_to_builtin_derive() { + check( + r#" + #[derive(Copy)] +//^^^^^^^^^^^^^^^ +struct Foo<|>; + +mod marker { + trait Copy {} +} +"#, + ); + } +} diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs new file mode 100644 index 
000000000..4a151b150 --- /dev/null +++ b/crates/ide/src/goto_type_definition.rs @@ -0,0 +1,151 @@ +use ide_db::RootDatabase; +use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; + +use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; + +// Feature: Go to Type Definition +// +// Navigates to the type of an identifier. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Go to Type Definition* +// |=== +pub(crate) fn goto_type_definition( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let sema = hir::Semantics::new(db); + + let file: ast::SourceFile = sema.parse(position.file_id); + let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?; + let token: SyntaxToken = sema.descend_into_macros(token); + + let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| { + let ty = match_ast! { + match node { + ast::Expr(it) => sema.type_of_expr(&it)?, + ast::Pat(it) => sema.type_of_pat(&it)?, + ast::SelfParam(it) => sema.type_of_self(&it)?, + _ => return None, + } + }; + + Some((ty, node)) + })?; + + let adt_def = ty.autoderef(db).filter_map(|ty| ty.as_adt()).last()?; + + let nav = adt_def.to_nav(db); + Some(RangeInfo::new(node.text_range(), vec![nav])) +} + +fn pick_best(tokens: TokenAtOffset) -> Option { + return tokens.max_by_key(priority); + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + IDENT | INT_NUMBER | T![self] => 2, + kind if kind.is_trivia() => 0, + _ => 1, + } + } +} + +#[cfg(test)] +mod tests { + use base_db::FileRange; + + use crate::mock_analysis::MockAnalysis; + + fn check(ra_fixture: &str) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + let (expected, data) = mock.annotation(); + assert!(data.is_empty()); + let analysis = mock.analysis(); + + let mut navs = analysis.goto_type_definition(position).unwrap().unwrap().info; + assert_eq!(navs.len(), 1); + let nav = navs.pop().unwrap(); + 
assert_eq!(expected, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }); + } + + #[test] + fn goto_type_definition_works_simple() { + check( + r#" +struct Foo; + //^^^ +fn foo() { + let f: Foo; f<|> +} +"#, + ); + } + + #[test] + fn goto_type_definition_works_simple_ref() { + check( + r#" +struct Foo; + //^^^ +fn foo() { + let f: &Foo; f<|> +} +"#, + ); + } + + #[test] + fn goto_type_definition_works_through_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +struct Foo {} + //^^^ +id! { + fn bar() { let f<|> = Foo {}; } +} +"#, + ); + } + + #[test] + fn goto_type_definition_for_param() { + check( + r#" +struct Foo; + //^^^ +fn foo(<|>f: Foo) {} +"#, + ); + } + + #[test] + fn goto_type_definition_for_tuple_field() { + check( + r#" +struct Foo; + //^^^ +struct Bar(Foo); +fn foo() { + let bar = Bar(Foo); + bar.<|>0; +} +"#, + ); + } + + #[test] + fn goto_def_for_self_param() { + check( + r#" +struct Foo; + //^^^ +impl Foo { + fn f(&self<|>) {} +} +"#, + ) + } +} diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs new file mode 100644 index 000000000..331aa4db0 --- /dev/null +++ b/crates/ide/src/hover.rs @@ -0,0 +1,2461 @@ +use base_db::SourceDatabase; +use hir::{ + Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay, + Module, ModuleDef, ModuleSource, Semantics, +}; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition}, + RootDatabase, +}; +use itertools::Itertools; +use stdx::format_to; +use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; +use test_utils::mark; + +use crate::{ + display::{macro_label, ShortLabel, ToNav, TryToNav}, + markup::Markup, + runnables::runnable, + FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, +}; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct HoverConfig { + pub implementations: bool, + pub run: bool, + pub debug: bool, + pub goto_type_def: bool, +} + +impl Default for HoverConfig { 
+ fn default() -> Self { + Self { implementations: true, run: true, debug: true, goto_type_def: true } + } +} + +impl HoverConfig { + pub const NO_ACTIONS: Self = + Self { implementations: false, run: false, debug: false, goto_type_def: false }; + + pub fn any(&self) -> bool { + self.implementations || self.runnable() || self.goto_type_def + } + + pub fn none(&self) -> bool { + !self.any() + } + + pub fn runnable(&self) -> bool { + self.run || self.debug + } +} + +#[derive(Debug, Clone)] +pub enum HoverAction { + Runnable(Runnable), + Implementaion(FilePosition), + GoToType(Vec), +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct HoverGotoTypeData { + pub mod_path: String, + pub nav: NavigationTarget, +} + +/// Contains the results when hovering over an item +#[derive(Debug, Default)] +pub struct HoverResult { + pub markup: Markup, + pub actions: Vec, +} + +// Feature: Hover +// +// Shows additional information, like type of an expression or documentation for definition when "focusing" code. +// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut. +pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option> { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id).syntax().clone(); + let token = pick_best(file.token_at_offset(position.offset))?; + let token = sema.descend_into_macros(token); + + let mut res = HoverResult::default(); + + let node = token.parent(); + let definition = match_ast! 
{ + match node { + ast::NameRef(name_ref) => classify_name_ref(&sema, &name_ref).map(|d| d.definition(sema.db)), + ast::Name(name) => classify_name(&sema, &name).map(|d| d.definition(sema.db)), + _ => None, + } + }; + if let Some(definition) = definition { + if let Some(markup) = hover_for_definition(db, definition) { + res.markup = markup; + if let Some(action) = show_implementations_action(db, definition) { + res.actions.push(action); + } + + if let Some(action) = runnable_action(&sema, definition, position.file_id) { + res.actions.push(action); + } + + if let Some(action) = goto_type_action(db, definition) { + res.actions.push(action); + } + + let range = sema.original_range(&node).range; + return Some(RangeInfo::new(range, res)); + } + } + + let node = token + .ancestors() + .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; + + let ty = match_ast! { + match node { + ast::Expr(it) => sema.type_of_expr(&it)?, + ast::Pat(it) => sema.type_of_pat(&it)?, + // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve. + // (e.g expanding a builtin macro). So we give up here. 
+ ast::MacroCall(_it) => return None, + _ => return None, + } + }; + + res.markup = Markup::fenced_block(&ty.display(db)); + let range = sema.original_range(&node).range; + Some(RangeInfo::new(range, res)) +} + +fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { + fn to_action(nav_target: NavigationTarget) -> HoverAction { + HoverAction::Implementaion(FilePosition { + file_id: nav_target.file_id, + offset: nav_target.focus_or_full_range().start(), + }) + } + + match def { + Definition::ModuleDef(it) => match it { + ModuleDef::Adt(Adt::Struct(it)) => Some(to_action(it.to_nav(db))), + ModuleDef::Adt(Adt::Union(it)) => Some(to_action(it.to_nav(db))), + ModuleDef::Adt(Adt::Enum(it)) => Some(to_action(it.to_nav(db))), + ModuleDef::Trait(it) => Some(to_action(it.to_nav(db))), + _ => None, + }, + _ => None, + } +} + +fn runnable_action( + sema: &Semantics, + def: Definition, + file_id: FileId, +) -> Option { + match def { + Definition::ModuleDef(it) => match it { + ModuleDef::Module(it) => match it.definition_source(sema.db).value { + ModuleSource::Module(it) => runnable(&sema, it.syntax().clone(), file_id) + .map(|it| HoverAction::Runnable(it)), + _ => None, + }, + ModuleDef::Function(it) => { + let src = it.source(sema.db); + if src.file_id != file_id.into() { + mark::hit!(hover_macro_generated_struct_fn_doc_comment); + mark::hit!(hover_macro_generated_struct_fn_doc_attr); + + return None; + } + + runnable(&sema, src.value.syntax().clone(), file_id) + .map(|it| HoverAction::Runnable(it)) + } + _ => None, + }, + _ => None, + } +} + +fn goto_type_action(db: &RootDatabase, def: Definition) -> Option { + match def { + Definition::Local(it) => { + let mut targets: Vec = Vec::new(); + let mut push_new_def = |item: ModuleDef| { + if !targets.contains(&item) { + targets.push(item); + } + }; + + it.ty(db).walk(db, |t| { + if let Some(adt) = t.as_adt() { + push_new_def(adt.into()); + } else if let Some(trait_) = t.as_dyn_trait() { + 
push_new_def(trait_.into()); + } else if let Some(traits) = t.as_impl_traits(db) { + traits.into_iter().for_each(|it| push_new_def(it.into())); + } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { + push_new_def(trait_.into()); + } + }); + + let targets = targets + .into_iter() + .filter_map(|it| { + Some(HoverGotoTypeData { + mod_path: render_path( + db, + it.module(db)?, + it.name(db).map(|name| name.to_string()), + ), + nav: it.try_to_nav(db)?, + }) + }) + .collect(); + + Some(HoverAction::GoToType(targets)) + } + _ => None, + } +} + +fn hover_markup( + docs: Option, + desc: Option, + mod_path: Option, +) -> Option { + match desc { + Some(desc) => { + let mut buf = String::new(); + + if let Some(mod_path) = mod_path { + if !mod_path.is_empty() { + format_to!(buf, "```rust\n{}\n```\n\n", mod_path); + } + } + format_to!(buf, "```rust\n{}\n```", desc); + + if let Some(doc) = docs { + format_to!(buf, "\n___\n\n{}", doc); + } + Some(buf.into()) + } + None => docs.map(Markup::from), + } +} + +fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option { + match def { + Definition::Field(f) => Some(f.parent_def(db).name(db)), + Definition::Local(l) => l.parent(db).name(db), + Definition::ModuleDef(md) => match md { + ModuleDef::Function(f) => match f.as_assoc_item(db)?.container(db) { + AssocItemContainer::Trait(t) => Some(t.name(db)), + AssocItemContainer::ImplDef(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)), + }, + ModuleDef::EnumVariant(e) => Some(e.parent_enum(db).name(db)), + _ => None, + }, + Definition::SelfType(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)), + _ => None, + } + .map(|name| name.to_string()) +} + +fn render_path(db: &RootDatabase, module: Module, item_name: Option) -> String { + let crate_name = + db.crate_graph()[module.krate().into()].display_name.as_ref().map(ToString::to_string); + let module_path = module + .path_to_root(db) + .into_iter() + .rev() + .flat_map(|it| it.name(db).map(|name| 
name.to_string())); + crate_name.into_iter().chain(module_path).chain(item_name).join("::") +} + +fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { + def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def))) +} + +fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option { + let mod_path = definition_mod_path(db, &def); + return match def { + Definition::Macro(it) => { + let src = it.source(db); + let docs = Documentation::from_ast(&src.value).map(Into::into); + hover_markup(docs, Some(macro_label(&src.value)), mod_path) + } + Definition::Field(it) => { + let src = it.source(db); + match src.value { + FieldSource::Named(it) => { + let docs = Documentation::from_ast(&it).map(Into::into); + hover_markup(docs, it.short_label(), mod_path) + } + _ => None, + } + } + Definition::ModuleDef(it) => match it { + ModuleDef::Module(it) => match it.definition_source(db).value { + ModuleSource::Module(it) => { + let docs = Documentation::from_ast(&it).map(Into::into); + hover_markup(docs, it.short_label(), mod_path) + } + ModuleSource::SourceFile(it) => { + let docs = Documentation::from_ast(&it).map(Into::into); + hover_markup(docs, it.short_label(), mod_path) + } + }, + ModuleDef::Function(it) => from_def_source(db, it, mod_path), + ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it, mod_path), + ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it, mod_path), + ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it, mod_path), + ModuleDef::EnumVariant(it) => from_def_source(db, it, mod_path), + ModuleDef::Const(it) => from_def_source(db, it, mod_path), + ModuleDef::Static(it) => from_def_source(db, it, mod_path), + ModuleDef::Trait(it) => from_def_source(db, it, mod_path), + ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path), + ModuleDef::BuiltinType(it) => return Some(it.to_string().into()), + }, + Definition::Local(it) => return Some(Markup::fenced_block(&it.ty(db).display(db))), + 
Definition::TypeParam(_) | Definition::SelfType(_) => { + // FIXME: Hover for generic param + None + } + }; + + fn from_def_source(db: &RootDatabase, def: D, mod_path: Option) -> Option + where + D: HasSource, + A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel + ast::AttrsOwner, + { + let src = def.source(db); + let docs = Documentation::from_ast(&src.value).map(Into::into); + hover_markup(docs, src.value.short_label(), mod_path) + } +} + +fn pick_best(tokens: TokenAtOffset) -> Option { + return tokens.max_by_key(priority); + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + IDENT | INT_NUMBER => 3, + T!['('] | T![')'] => 2, + kind if kind.is_trivia() => 0, + _ => 1, + } + } +} + +#[cfg(test)] +mod tests { + use base_db::FileLoader; + use expect::{expect, Expect}; + + use crate::mock_analysis::analysis_and_position; + + use super::*; + + fn check_hover_no_result(ra_fixture: &str) { + let (analysis, position) = analysis_and_position(ra_fixture); + assert!(analysis.hover(position).unwrap().is_none()); + } + + fn check(ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let hover = analysis.hover(position).unwrap().unwrap(); + + let content = analysis.db.file_text(position.file_id); + let hovered_element = &content[hover.range]; + + let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + expect.assert_eq(&actual) + } + + fn check_actions(ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let hover = analysis.hover(position).unwrap().unwrap(); + expect.assert_debug_eq(&hover.info.actions) + } + + #[test] + fn hover_shows_type_of_an_expression() { + check( + r#" +pub fn foo() -> u32 { 1 } + +fn main() { + let foo_test = foo()<|>; +} +"#, + expect![[r#" + *foo()* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn hover_shows_long_type_of_an_expression() { + check( + r#" +struct Scan { a: A, b: B, c: C } +struct Iter { inner: I } 
+enum Option { Some(T), None } + +struct OtherStruct { i: T } + +fn scan(a: A, b: B, c: C) -> Iter, B, C>> { + Iter { inner: Scan { a, b, c } } +} + +fn main() { + let num: i32 = 55; + let closure = |memo: &mut u32, value: &u32, _another: &mut u32| -> Option { + Option::Some(*memo + value) + }; + let number = 5u32; + let mut iter<|> = scan(OtherStruct { i: num }, closure, number); +} +"#, + expect![[r#" + *iter* + ```rust + Iter>, |&mut u32, &u32, &mut u32| -> Option, u32>> + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_signature() { + // Single file with result + check( + r#" +pub fn foo() -> u32 { 1 } + +fn main() { let foo_test = fo<|>o(); } +"#, + expect![[r#" + *foo* + ```rust + pub fn foo() -> u32 + ``` + "#]], + ); + + // Multiple candidates but results are ambiguous. + check( + r#" +//- /a.rs +pub fn foo() -> u32 { 1 } + +//- /b.rs +pub fn foo() -> &str { "" } + +//- /c.rs +pub fn foo(a: u32, b: u32) {} + +//- /main.rs +mod a; +mod b; +mod c; + +fn main() { let foo_test = fo<|>o(); } + "#, + expect![[r#" + *foo* + ```rust + {unknown} + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_signature_with_type_params() { + check( + r#" +pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str { } + +fn main() { let foo_test = fo<|>o(); } + "#, + expect![[r#" + *foo* + ```rust + pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_signature_on_fn_name() { + check( + r#" +pub fn foo<|>(a: u32, b: u32) -> u32 {} + +fn main() { } +"#, + expect![[r#" + *foo* + ```rust + pub fn foo(a: u32, b: u32) -> u32 + ``` + "#]], + ); + } + + #[test] + fn hover_shows_fn_doc() { + check( + r#" +/// # Example +/// ``` +/// # use std::path::Path; +/// # +/// foo(Path::new("hello, world!")) +/// ``` +pub fn foo<|>(_: &Path) {} + +fn main() { } +"#, + expect![[r#" + *foo* + ```rust + pub fn foo(_: &Path) + ``` + ___ + + # Example + ``` + # use std::path::Path; + # + foo(Path::new("hello, world!")) + ``` + "#]], + ); + } + + #[test] + fn 
hover_shows_struct_field_info() { + // Hovering over the field when instantiating + check( + r#" +struct Foo { field_a: u32 } + +fn main() { + let foo = Foo { field_a<|>: 0, }; +} +"#, + expect![[r#" + *field_a* + ```rust + Foo + ``` + + ```rust + field_a: u32 + ``` + "#]], + ); + + // Hovering over the field in the definition + check( + r#" +struct Foo { field_a<|>: u32 } + +fn main() { + let foo = Foo { field_a: 0 }; +} +"#, + expect![[r#" + *field_a* + ```rust + Foo + ``` + + ```rust + field_a: u32 + ``` + "#]], + ); + } + + #[test] + fn hover_const_static() { + check( + r#"const foo<|>: u32 = 123;"#, + expect![[r#" + *foo* + ```rust + const foo: u32 = 123 + ``` + "#]], + ); + check( + r#"static foo<|>: u32 = 456;"#, + expect![[r#" + *foo* + ```rust + static foo: u32 + ``` + "#]], + ); + } + + #[test] + fn hover_default_generic_types() { + check( + r#" +struct Test { k: K, t: T } + +fn main() { + let zz<|> = Test { t: 23u8, k: 33 }; +}"#, + expect![[r#" + *zz* + ```rust + Test + ``` + "#]], + ); + } + + #[test] + fn hover_some() { + check( + r#" +enum Option { Some(T) } +use Option::Some; + +fn main() { So<|>me(12); } +"#, + expect![[r#" + *Some* + ```rust + Option + ``` + + ```rust + Some + ``` + "#]], + ); + + check( + r#" +enum Option { Some(T) } +use Option::Some; + +fn main() { let b<|>ar = Some(12); } +"#, + expect![[r#" + *bar* + ```rust + Option + ``` + "#]], + ); + } + + #[test] + fn hover_enum_variant() { + check( + r#" +enum Option { + /// The None variant + Non<|>e +} +"#, + expect![[r#" + *None* + ```rust + Option + ``` + + ```rust + None + ``` + ___ + + The None variant + "#]], + ); + + check( + r#" +enum Option { + /// The Some variant + Some(T) +} +fn main() { + let s = Option::Som<|>e(12); +} +"#, + expect![[r#" + *Some* + ```rust + Option + ``` + + ```rust + Some + ``` + ___ + + The Some variant + "#]], + ); + } + + #[test] + fn hover_for_local_variable() { + check( + r#"fn func(foo: i32) { fo<|>o; }"#, + expect![[r#" + *foo* + ```rust + i32 + 
``` + "#]], + ) + } + + #[test] + fn hover_for_local_variable_pat() { + check( + r#"fn func(fo<|>o: i32) {}"#, + expect![[r#" + *foo* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn hover_local_var_edge() { + check( + r#"fn func(foo: i32) { if true { <|>foo; }; }"#, + expect![[r#" + *foo* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn hover_for_param_edge() { + check( + r#"fn func(<|>foo: i32) {}"#, + expect![[r#" + *foo* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn test_hover_infer_associated_method_result() { + check( + r#" +struct Thing { x: u32 } + +impl Thing { + fn new() -> Thing { Thing { x: 0 } } +} + +fn main() { let foo_<|>test = Thing::new(); } + "#, + expect![[r#" + *foo_test* + ```rust + Thing + ``` + "#]], + ) + } + + #[test] + fn test_hover_infer_associated_method_exact() { + check( + r#" +mod wrapper { + struct Thing { x: u32 } + + impl Thing { + fn new() -> Thing { Thing { x: 0 } } + } +} + +fn main() { let foo_test = wrapper::Thing::new<|>(); } +"#, + expect![[r#" + *new* + ```rust + wrapper::Thing + ``` + + ```rust + fn new() -> Thing + ``` + "#]], + ) + } + + #[test] + fn test_hover_infer_associated_const_in_pattern() { + check( + r#" +struct X; +impl X { + const C: u32 = 1; +} + +fn main() { + match 1 { + X::C<|> => {}, + 2 => {}, + _ => {} + }; +} +"#, + expect![[r#" + *C* + ```rust + const C: u32 = 1 + ``` + "#]], + ) + } + + #[test] + fn test_hover_self() { + check( + r#" +struct Thing { x: u32 } +impl Thing { + fn new() -> Self { Self<|> { x: 0 } } +} +"#, + expect![[r#" + *Self { x: 0 }* + ```rust + Thing + ``` + "#]], + ) + } /* FIXME: revive these tests + let (analysis, position) = analysis_and_position( + " + struct Thing { x: u32 } + impl Thing { + fn new() -> Self<|> { + Self { x: 0 } + } + } + ", + ); + + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup(&hover.info.markup.as_str()), ("Thing")); + + let (analysis, position) = analysis_and_position( + " + enum Thing { A } + impl 
Thing { + pub fn new() -> Self<|> { + Thing::A + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup(&hover.info.markup.as_str()), ("enum Thing")); + + let (analysis, position) = analysis_and_position( + " + enum Thing { A } + impl Thing { + pub fn thing(a: Self<|>) { + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup(&hover.info.markup.as_str()), ("enum Thing")); + */ + + #[test] + fn test_hover_shadowing_pat() { + check( + r#" +fn x() {} + +fn y() { + let x = 0i32; + x<|>; +} +"#, + expect![[r#" + *x* + ```rust + i32 + ``` + "#]], + ) + } + + #[test] + fn test_hover_macro_invocation() { + check( + r#" +macro_rules! foo { () => {} } + +fn f() { fo<|>o!(); } +"#, + expect![[r#" + *foo* + ```rust + macro_rules! foo + ``` + "#]], + ) + } + + #[test] + fn test_hover_tuple_field() { + check( + r#"struct TS(String, i32<|>);"#, + expect![[r#" + *i32* + i32 + "#]], + ) + } + + #[test] + fn test_hover_through_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +fn foo() {} +id! { + fn bar() { fo<|>o(); } +} +"#, + expect![[r#" + *foo* + ```rust + fn foo() + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_expr_in_macro() { + check( + r#" +macro_rules! id { ($($tt:tt)*) => { $($tt)* } } +fn foo(bar:u32) { let a = id!(ba<|>r); } +"#, + expect![[r#" + *bar* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_expr_in_macro_recursive() { + check( + r#" +macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } +macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } } +fn foo(bar:u32) { let a = id!(ba<|>r); } +"#, + expect![[r#" + *bar* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_func_in_macro_recursive() { + check( + r#" +macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } +macro_rules! 
id { ($($tt:tt)*) => { id_deep!($($tt)*) } } +fn bar() -> u32 { 0 } +fn foo() { let a = id!([0u32, bar(<|>)] ); } +"#, + expect![[r#" + *bar()* + ```rust + u32 + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_literal_string_in_macro() { + check( + r#" +macro_rules! arr { ($($tt:tt)*) => { [$($tt)*)] } } +fn foo() { + let mastered_for_itunes = ""; + let _ = arr!("Tr<|>acks", &mastered_for_itunes); +} +"#, + expect![[r#" + *"Tracks"* + ```rust + &str + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_assert_macro() { + check( + r#" +#[rustc_builtin_macro] +macro_rules! assert {} + +fn bar() -> bool { true } +fn foo() { + assert!(ba<|>r()); +} +"#, + expect![[r#" + *bar* + ```rust + fn bar() -> bool + ``` + "#]], + ); + } + + #[test] + fn test_hover_through_literal_string_in_builtin_macro() { + check_hover_no_result( + r#" + #[rustc_builtin_macro] + macro_rules! format {} + + fn foo() { + format!("hel<|>lo {}", 0); + } + "#, + ); + } + + #[test] + fn test_hover_non_ascii_space_doc() { + check( + " +/// <- `\u{3000}` here +fn foo() { } + +fn bar() { fo<|>o(); } +", + expect![[r#" + *foo* + ```rust + fn foo() + ``` + ___ + + <- ` ` here + "#]], + ); + } + + #[test] + fn test_hover_function_show_qualifiers() { + check( + r#"async fn foo<|>() {}"#, + expect![[r#" + *foo* + ```rust + async fn foo() + ``` + "#]], + ); + check( + r#"pub const unsafe fn foo<|>() {}"#, + expect![[r#" + *foo* + ```rust + pub const unsafe fn foo() + ``` + "#]], + ); + check( + r#"pub(crate) async unsafe extern "C" fn foo<|>() {}"#, + expect![[r#" + *foo* + ```rust + pub(crate) async unsafe extern "C" fn foo() + ``` + "#]], + ); + } + + #[test] + fn test_hover_trait_show_qualifiers() { + check_actions( + r"unsafe trait foo<|>() {}", + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 13, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_extern_crate() { + check( + r#" +//- /main.rs +extern crate st<|>d; +//- /std/lib.rs +//! 
Standard library for this test +//! +//! Printed? +//! abc123 + "#, + expect![[r#" + *std* + Standard library for this test + + Printed? + abc123 + "#]], + ); + check( + r#" +//- /main.rs +extern crate std as ab<|>c; +//- /std/lib.rs +//! Standard library for this test +//! +//! Printed? +//! abc123 + "#, + expect![[r#" + *abc* + Standard library for this test + + Printed? + abc123 + "#]], + ); + } + + #[test] + fn test_hover_mod_with_same_name_as_function() { + check( + r#" +use self::m<|>y::Bar; +mod my { pub struct Bar; } + +fn my() {} +"#, + expect![[r#" + *my* + ```rust + mod my + ``` + "#]], + ); + } + + #[test] + fn test_hover_struct_doc_comment() { + check( + r#" +/// bar docs +struct Bar; + +fn foo() { let bar = Ba<|>r; } +"#, + expect![[r#" + *Bar* + ```rust + struct Bar + ``` + ___ + + bar docs + "#]], + ); + } + + #[test] + fn test_hover_struct_doc_attr() { + check( + r#" +#[doc = "bar docs"] +struct Bar; + +fn foo() { let bar = Ba<|>r; } +"#, + expect![[r#" + *Bar* + ```rust + struct Bar + ``` + ___ + + bar docs + "#]], + ); + } + + #[test] + fn test_hover_struct_doc_attr_multiple_and_mixed() { + check( + r#" +/// bar docs 0 +#[doc = "bar docs 1"] +#[doc = "bar docs 2"] +struct Bar; + +fn foo() { let bar = Ba<|>r; } +"#, + expect![[r#" + *Bar* + ```rust + struct Bar + ``` + ___ + + bar docs 0 + + bar docs 1 + + bar docs 2 + "#]], + ); + } + + #[test] + fn test_hover_macro_generated_struct_fn_doc_comment() { + mark::check!(hover_macro_generated_struct_fn_doc_comment); + + check( + r#" +macro_rules! bar { + () => { + struct Bar; + impl Bar { + /// Do the foo + fn foo(&self) {} + } + } +} + +bar!(); + +fn foo() { let bar = Bar; bar.fo<|>o(); } +"#, + expect![[r#" + *foo* + ```rust + Bar + ``` + + ```rust + fn foo(&self) + ``` + ___ + + Do the foo + "#]], + ); + } + + #[test] + fn test_hover_macro_generated_struct_fn_doc_attr() { + mark::check!(hover_macro_generated_struct_fn_doc_attr); + + check( + r#" +macro_rules! 
bar { + () => { + struct Bar; + impl Bar { + #[doc = "Do the foo"] + fn foo(&self) {} + } + } +} + +bar!(); + +fn foo() { let bar = Bar; bar.fo<|>o(); } +"#, + expect![[r#" + *foo* + ```rust + Bar + ``` + + ```rust + fn foo(&self) + ``` + ___ + + Do the foo + "#]], + ); + } + + #[test] + fn test_hover_trait_has_impl_action() { + check_actions( + r#"trait foo<|>() {}"#, + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 6, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_struct_has_impl_action() { + check_actions( + r"struct foo<|>() {}", + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 7, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_union_has_impl_action() { + check_actions( + r#"union foo<|>() {}"#, + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 6, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_enum_has_impl_action() { + check_actions( + r"enum foo<|>() { A, B }", + expect![[r#" + [ + Implementaion( + FilePosition { + file_id: FileId( + 1, + ), + offset: 5, + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_test_has_action() { + check_actions( + r#" +#[test] +fn foo_<|>test() {} +"#, + expect![[r#" + [ + Runnable( + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..24, + focus_range: Some( + 11..19, + ), + name: "foo_test", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "foo_test", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_test_mod_has_action() { + check_actions( + r#" +mod tests<|> { + #[test] + fn foo_test() {} +} +"#, + expect![[r#" + [ + Runnable( + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..46, + focus_range: Some( + 4..9, + ), + name: "tests", + kind: MODULE, + 
container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "tests", + }, + cfg_exprs: [], + }, + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_struct_has_goto_type_action() { + check_actions( + r#" +struct S{ f1: u32 } + +fn main() { let s<|>t = S{ f1:0 }; } + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..19, + focus_range: Some( + 7..8, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_struct_has_goto_type_actions() { + check_actions( + r#" +struct Arg(u32); +struct S{ f1: T } + +fn main() { let s<|>t = S{ f1:Arg(0) }; } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 17..37, + focus_range: Some( + 24..25, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Arg", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..16, + focus_range: Some( + 7..10, + ), + name: "Arg", + kind: STRUCT, + container_name: None, + description: Some( + "struct Arg", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_struct_has_flattened_goto_type_actions() { + check_actions( + r#" +struct Arg(u32); +struct S{ f1: T } + +fn main() { let s<|>t = S{ f1: S{ f1: Arg(0) } }; } + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 17..37, + focus_range: Some( + 24..25, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Arg", + nav: NavigationTarget { + 
file_id: FileId( + 1, + ), + full_range: 0..16, + focus_range: Some( + 7..10, + ), + name: "Arg", + kind: STRUCT, + container_name: None, + description: Some( + "struct Arg", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_tuple_has_goto_type_actions() { + check_actions( + r#" +struct A(u32); +struct B(u32); +mod M { + pub struct C(u32); +} + +fn main() { let s<|>t = (A(1), B(2), M::C(3) ); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "A", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..14, + focus_range: Some( + 7..8, + ), + name: "A", + kind: STRUCT, + container_name: None, + description: Some( + "struct A", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "B", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 15..29, + focus_range: Some( + 22..23, + ), + name: "B", + kind: STRUCT, + container_name: None, + description: Some( + "struct B", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "M::C", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 42..60, + focus_range: Some( + 53..54, + ), + name: "C", + kind: STRUCT, + container_name: None, + description: Some( + "pub struct C", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_return_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +fn foo() -> impl Foo {} + +fn main() { let s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_return_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S; +fn foo() -> impl Foo {} + +fn main() { let 
s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..25, + focus_range: Some( + 23..24, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_return_impl_traits_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +trait Bar {} +fn foo() -> impl Foo + Bar {} + +fn main() { let s<|>t = foo(); } + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Bar", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 13..25, + focus_range: Some( + 19..22, + ), + name: "Bar", + kind: TRAIT, + container_name: None, + description: Some( + "trait Bar", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_return_impl_traits_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +trait Bar {} +struct S1 {} +struct S2 {} + +fn foo() -> impl Foo + Bar {} + +fn main() { let s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + 
mod_path: "Bar", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..31, + focus_range: Some( + 22..25, + ), + name: "Bar", + kind: TRAIT, + container_name: None, + description: Some( + "trait Bar", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S1", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 32..44, + focus_range: Some( + 39..41, + ), + name: "S1", + kind: STRUCT, + container_name: None, + description: Some( + "struct S1", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S2", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 45..57, + focus_range: Some( + 52..54, + ), + name: "S2", + kind: STRUCT, + container_name: None, + description: Some( + "struct S2", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_arg_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +fn foo(ar<|>g: &impl Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_arg_impl_traits_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +trait Bar {} +struct S{} + +fn foo(ar<|>g: &impl Foo + Bar) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Bar", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 13..28, + focus_range: Some( + 19..22, + ), + name: "Bar", + kind: TRAIT, + container_name: None, + 
description: Some( + "trait Bar", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 29..39, + focus_range: Some( + 36..37, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_arg_generic_impl_trait_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S {} +fn foo(ar<|>g: &impl Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..27, + focus_range: Some( + 23..24, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_dyn_return_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S; +impl Foo for S {} + +struct B{} +fn foo() -> B {} + +fn main() { let s<|>t = foo(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "B", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 42..55, + focus_range: Some( + 49..50, + ), + name: "B", + kind: STRUCT, + container_name: None, + description: Some( + "struct B", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_dyn_arg_has_goto_type_action() { 
+ check_actions( + r#" +trait Foo {} +fn foo(ar<|>g: &dyn Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..12, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_generic_dyn_arg_has_goto_type_action() { + check_actions( + r#" +trait Foo {} +struct S {} +fn foo(ar<|>g: &dyn Foo) {} +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..15, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 16..27, + focus_range: Some( + 23..24, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_goto_type_action_links_order() { + check_actions( + r#" +trait ImplTrait {} +trait DynTrait {} +struct B {} +struct S {} + +fn foo(a<|>rg: &impl ImplTrait>>>) {} + "#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "ImplTrait", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..21, + focus_range: Some( + 6..15, + ), + name: "ImplTrait", + kind: TRAIT, + container_name: None, + description: Some( + "trait ImplTrait", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "B", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 43..57, + focus_range: Some( + 50..51, + ), + name: "B", + kind: STRUCT, + container_name: None, + description: Some( + "struct B", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "DynTrait", 
+ nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 22..42, + focus_range: Some( + 28..36, + ), + name: "DynTrait", + kind: TRAIT, + container_name: None, + description: Some( + "trait DynTrait", + ), + docs: None, + }, + }, + HoverGotoTypeData { + mod_path: "S", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 58..69, + focus_range: Some( + 65..66, + ), + name: "S", + kind: STRUCT, + container_name: None, + description: Some( + "struct S", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } + + #[test] + fn test_hover_associated_type_has_goto_type_action() { + check_actions( + r#" +trait Foo { + type Item; + fn get(self) -> Self::Item {} +} + +struct Bar{} +struct S{} + +impl Foo for S { type Item = Bar; } + +fn test() -> impl Foo { S {} } + +fn main() { let s<|>t = test().get(); } +"#, + expect![[r#" + [ + GoToType( + [ + HoverGotoTypeData { + mod_path: "Foo", + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 0..62, + focus_range: Some( + 6..9, + ), + name: "Foo", + kind: TRAIT, + container_name: None, + description: Some( + "trait Foo", + ), + docs: None, + }, + }, + ], + ), + ] + "#]], + ); + } +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs new file mode 100644 index 000000000..002adf915 --- /dev/null +++ b/crates/ide/src/inlay_hints.rs @@ -0,0 +1,927 @@ +use hir::{Adt, Callable, HirDisplay, Semantics, Type}; +use ide_db::RootDatabase; +use stdx::to_lower_snake_case; +use syntax::{ + ast::{self, ArgListOwner, AstNode}, + match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, TextRange, T, +}; + +use crate::FileId; +use ast::NameOwner; +use either::Either; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct InlayHintsConfig { + pub type_hints: bool, + pub parameter_hints: bool, + pub chaining_hints: bool, + pub max_length: Option, +} + +impl Default for InlayHintsConfig { + fn default() -> Self { + Self { type_hints: true, parameter_hints: true, chaining_hints: 
true, max_length: None } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum InlayKind { + TypeHint, + ParameterHint, + ChainingHint, +} + +#[derive(Debug)] +pub struct InlayHint { + pub range: TextRange, + pub kind: InlayKind, + pub label: SmolStr, +} + +// Feature: Inlay Hints +// +// rust-analyzer shows additional information inline with the source code. +// Editors usually render this using read-only virtual text snippets interspersed with code. +// +// rust-analyzer shows hits for +// +// * types of local variables +// * names of function arguments +// * types of chained expressions +// +// **Note:** VS Code does not have native support for inlay hints https://github.com/microsoft/vscode/issues/16221[yet] and the hints are implemented using decorations. +// This approach has limitations, the caret movement and bracket highlighting near the edges of the hint may be weird: +// https://github.com/rust-analyzer/rust-analyzer/issues/1623[1], https://github.com/rust-analyzer/rust-analyzer/issues/3453[2]. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Toggle inlay hints* +// |=== +pub(crate) fn inlay_hints( + db: &RootDatabase, + file_id: FileId, + config: &InlayHintsConfig, +) -> Vec { + let _p = profile::span("inlay_hints"); + let sema = Semantics::new(db); + let file = sema.parse(file_id); + + let mut res = Vec::new(); + for node in file.syntax().descendants() { + if let Some(expr) = ast::Expr::cast(node.clone()) { + get_chaining_hints(&mut res, &sema, config, expr); + } + + match_ast! 
{ + match node { + ast::CallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); }, + ast::MethodCallExpr(it) => { get_param_name_hints(&mut res, &sema, config, ast::Expr::from(it)); }, + ast::IdentPat(it) => { get_bind_pat_hints(&mut res, &sema, config, it); }, + _ => (), + } + } + } + res +} + +fn get_chaining_hints( + acc: &mut Vec, + sema: &Semantics, + config: &InlayHintsConfig, + expr: ast::Expr, +) -> Option<()> { + if !config.chaining_hints { + return None; + } + + if matches!(expr, ast::Expr::RecordExpr(_)) { + return None; + } + + let mut tokens = expr + .syntax() + .siblings_with_tokens(Direction::Next) + .filter_map(NodeOrToken::into_token) + .filter(|t| match t.kind() { + SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, + SyntaxKind::COMMENT => false, + _ => true, + }); + + // Chaining can be defined as an expression whose next sibling tokens are newline and dot + // Ignoring extra whitespace and comments + let next = tokens.next()?.kind(); + let next_next = tokens.next()?.kind(); + if next == SyntaxKind::WHITESPACE && next_next == T![.] 
{ + let ty = sema.type_of_expr(&expr)?; + if ty.is_unknown() { + return None; + } + if matches!(expr, ast::Expr::PathExpr(_)) { + if let Some(Adt::Struct(st)) = ty.as_adt() { + if st.fields(sema.db).is_empty() { + return None; + } + } + } + let label = ty.display_truncated(sema.db, config.max_length).to_string(); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ChainingHint, + label: label.into(), + }); + } + Some(()) +} + +fn get_param_name_hints( + acc: &mut Vec, + sema: &Semantics, + config: &InlayHintsConfig, + expr: ast::Expr, +) -> Option<()> { + if !config.parameter_hints { + return None; + } + + let args = match &expr { + ast::Expr::CallExpr(expr) => expr.arg_list()?.args(), + ast::Expr::MethodCallExpr(expr) => expr.arg_list()?.args(), + _ => return None, + }; + + let callable = get_callable(sema, &expr)?; + let hints = callable + .params(sema.db) + .into_iter() + .zip(args) + .filter_map(|((param, _ty), arg)| { + let param_name = match param? { + Either::Left(self_param) => self_param.to_string(), + Either::Right(pat) => match pat { + ast::Pat::IdentPat(it) => it.name()?.to_string(), + _ => return None, + }, + }; + Some((param_name, arg)) + }) + .filter(|(param_name, arg)| should_show_param_name_hint(sema, &callable, ¶m_name, &arg)) + .map(|(param_name, arg)| InlayHint { + range: arg.syntax().text_range(), + kind: InlayKind::ParameterHint, + label: param_name.into(), + }); + + acc.extend(hints); + Some(()) +} + +fn get_bind_pat_hints( + acc: &mut Vec, + sema: &Semantics, + config: &InlayHintsConfig, + pat: ast::IdentPat, +) -> Option<()> { + if !config.type_hints { + return None; + } + + let ty = sema.type_of_pat(&pat.clone().into())?; + + if should_not_display_type_hint(sema.db, &pat, &ty) { + return None; + } + + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::TypeHint, + label: ty.display_truncated(sema.db, config.max_length).to_string().into(), + }); + Some(()) +} + +fn pat_is_enum_variant(db: 
&RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &Type) -> bool { + if let Some(Adt::Enum(enum_data)) = pat_ty.as_adt() { + let pat_text = bind_pat.to_string(); + enum_data + .variants(db) + .into_iter() + .map(|variant| variant.name(db).to_string()) + .any(|enum_name| enum_name == pat_text) + } else { + false + } +} + +fn should_not_display_type_hint( + db: &RootDatabase, + bind_pat: &ast::IdentPat, + pat_ty: &Type, +) -> bool { + if pat_ty.is_unknown() { + return true; + } + + if let Some(Adt::Struct(s)) = pat_ty.as_adt() { + if s.fields(db).is_empty() && s.name(db).to_string() == bind_pat.to_string() { + return true; + } + } + + for node in bind_pat.syntax().ancestors() { + match_ast! { + match node { + ast::LetStmt(it) => { + return it.ty().is_some() + }, + ast::Param(it) => { + return it.ty().is_some() + }, + ast::MatchArm(_it) => { + return pat_is_enum_variant(db, bind_pat, pat_ty); + }, + ast::IfExpr(it) => { + return it.condition().and_then(|condition| condition.pat()).is_some() + && pat_is_enum_variant(db, bind_pat, pat_ty); + }, + ast::WhileExpr(it) => { + return it.condition().and_then(|condition| condition.pat()).is_some() + && pat_is_enum_variant(db, bind_pat, pat_ty); + }, + _ => (), + } + } + } + false +} + +fn should_show_param_name_hint( + sema: &Semantics, + callable: &Callable, + param_name: &str, + argument: &ast::Expr, +) -> bool { + let param_name = param_name.trim_start_matches('_'); + let fn_name = match callable.kind() { + hir::CallableKind::Function(it) => Some(it.name(sema.db).to_string()), + hir::CallableKind::TupleStruct(_) + | hir::CallableKind::TupleEnumVariant(_) + | hir::CallableKind::Closure => None, + }; + if param_name.is_empty() + || Some(param_name) == fn_name.as_ref().map(|s| s.trim_start_matches('_')) + || is_argument_similar_to_param_name(sema, argument, param_name) + || param_name.starts_with("ra_fixture") + { + return false; + } + + // avoid displaying hints for common functions like map, filter, etc. 
+ // or other obvious words used in std + !(callable.n_params() == 1 && is_obvious_param(param_name)) +} + +fn is_argument_similar_to_param_name( + sema: &Semantics, + argument: &ast::Expr, + param_name: &str, +) -> bool { + if is_enum_name_similar_to_param_name(sema, argument, param_name) { + return true; + } + match get_string_representation(argument) { + None => false, + Some(repr) => { + let argument_string = repr.trim_start_matches('_'); + argument_string.starts_with(param_name) || argument_string.ends_with(param_name) + } + } +} + +fn is_enum_name_similar_to_param_name( + sema: &Semantics, + argument: &ast::Expr, + param_name: &str, +) -> bool { + match sema.type_of_expr(argument).and_then(|t| t.as_adt()) { + Some(Adt::Enum(e)) => to_lower_snake_case(&e.name(sema.db).to_string()) == param_name, + _ => false, + } +} + +fn get_string_representation(expr: &ast::Expr) -> Option { + match expr { + ast::Expr::MethodCallExpr(method_call_expr) => { + Some(method_call_expr.name_ref()?.to_string()) + } + ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), + _ => Some(expr.to_string()), + } +} + +fn is_obvious_param(param_name: &str) -> bool { + let is_obvious_param_name = + matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); + param_name.len() == 1 || is_obvious_param_name +} + +fn get_callable(sema: &Semantics, expr: &ast::Expr) -> Option { + match expr { + ast::Expr::CallExpr(expr) => sema.type_of_expr(&expr.expr()?)?.as_callable(sema.db), + ast::Expr::MethodCallExpr(expr) => sema.resolve_method_call_as_callable(expr), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use test_utils::extract_annotations; + + use crate::{inlay_hints::InlayHintsConfig, mock_analysis::single_file}; + + fn check(ra_fixture: &str) { + check_with_config(InlayHintsConfig::default(), ra_fixture); + } + + fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { + let (analysis, file_id) = 
single_file(ra_fixture); + let expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); + let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap(); + let actual = + inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); + assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); + } + + fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { + let (analysis, file_id) = single_file(ra_fixture); + let inlay_hints = analysis.inlay_hints(file_id, &config).unwrap(); + expect.assert_debug_eq(&inlay_hints) + } + + #[test] + fn param_hints_only() { + check_with_config( + InlayHintsConfig { + parameter_hints: true, + type_hints: false, + chaining_hints: false, + max_length: None, + }, + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo( + 4, + //^ a + 4, + //^ b + ); +}"#, + ); + } + + #[test] + fn hints_disabled() { + check_with_config( + InlayHintsConfig { + type_hints: false, + parameter_hints: false, + chaining_hints: false, + max_length: None, + }, + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo(4, 4); +}"#, + ); + } + + #[test] + fn type_hints_only() { + check_with_config( + InlayHintsConfig { + type_hints: true, + parameter_hints: false, + chaining_hints: false, + max_length: None, + }, + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo(4, 4); + //^^ i32 +}"#, + ); + } + + #[test] + fn default_generic_types_should_not_be_displayed() { + check( + r#" +struct Test { k: K, t: T } + +fn main() { + let zz = Test { t: 23u8, k: 33 }; + //^^ Test + let zz_ref = &zz; + //^^^^^^ &Test + let test = || zz; + //^^^^ || -> Test +}"#, + ); + } + + #[test] + fn let_statement() { + check( + r#" +#[derive(PartialEq)] +enum Option { None, Some(T) } + +#[derive(PartialEq)] +struct Test { a: Option, b: u8 } + +fn main() { + struct InnerStruct {} + + let test = 54; + //^^^^ i32 + let test: i32 = 33; + let mut test = 
33; + //^^^^^^^^ i32 + let _ = 22; + let test = "test"; + //^^^^ &str + let test = InnerStruct {}; + + let test = unresolved(); + + let test = (42, 'a'); + //^^^^ (i32, char) + let (a, (b, (c,)) = (2, (3, (9.2,)); + //^ i32 ^ i32 ^ f64 + let &x = &92; + //^ i32 +}"#, + ); + } + + #[test] + fn closure_parameters() { + check( + r#" +fn main() { + let mut start = 0; + //^^^^^^^^^ i32 + (0..2).for_each(|increment| { start += increment; }); + //^^^^^^^^^ i32 + + let multiply = + //^^^^^^^^ |…| -> i32 + | a, b| a * b + //^ i32 ^ i32 + ; + + let _: i32 = multiply(1, 2); + let multiply_ref = &multiply; + //^^^^^^^^^^^^ &|…| -> i32 + + let return_42 = || 42; + //^^^^^^^^^ || -> i32 +}"#, + ); + } + + #[test] + fn for_expression() { + check( + r#" +fn main() { + let mut start = 0; + //^^^^^^^^^ i32 + for increment in 0..2 { start += increment; } + //^^^^^^^^^ i32 +}"#, + ); + } + + #[test] + fn if_expr() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + if let None = &test {}; + if let test = &test {}; + //^^^^ &Option + if let Some(test) = &test {}; + //^^^^ &Test + if let Some(Test { a, b }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: x, b: y }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 + if let Some(Test { a: None, b: y }) = &test {}; + //^ &u8 + if let Some(Test { b: y, .. 
}) = &test {}; + //^ &u8 + if test == None {} +}"#, + ); + } + + #[test] + fn while_expr() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + while let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 +}"#, + ); + } + + #[test] + fn match_arm_list() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct Test { a: Option, b: u8 } + +fn main() { + match Some(Test { a: Some(3), b: 1 }) { + None => (), + test => (), + //^^^^ Option + Some(Test { a: Some(x), b: y }) => (), + //^ u32 ^ u8 + _ => {} + } +}"#, + ); + } + + #[test] + fn hint_truncation() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..Default::default() }, + r#" +struct Smol(T); + +struct VeryLongOuterName(T); + +fn main() { + let a = Smol(0u32); + //^ Smol + let b = VeryLongOuterName(0usize); + //^ VeryLongOuterName<…> + let c = Smol(Smol(0u32)) + //^ Smol> +}"#, + ); + } + + #[test] + fn function_call_parameter_hint() { + check( + r#" +enum Option { None, Some(T) } +use Option::*; + +struct FileId {} +struct SmolStr {} + +struct TextRange {} +struct SyntaxKind {} +struct NavigationTarget {} + +struct Test {} + +impl Test { + fn method(&self, mut param: i32) -> i32 { param * 2 } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + docs: Option, + ) -> NavigationTarget { + NavigationTarget {} + } +} + +fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { + foo + bar +} + +fn main() { + let not_literal = 1; + //^^^^^^^^^^^ i32 + let _: i32 = test_func(1, 2, "hello", 3, not_literal); + //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last + let t: Test = Test {}; + t.method(123); + //^^^ param + Test::method(&t, 3456); + //^^ &self ^^^^ param + Test::from_syntax( + FileId {}, + //^^^^^^^^^ file_id + "impl".into(), + //^^^^^^^^^^^^^ name + None, + 
//^^^^ focus_range + TextRange {}, + //^^^^^^^^^^^^ full_range + SyntaxKind {}, + //^^^^^^^^^^^^^ kind + None, + //^^^^ docs + ); +}"#, + ); + } + + #[test] + fn omitted_parameters_hints_heuristics() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..Default::default() }, + r#" +fn map(f: i32) {} +fn filter(predicate: i32) {} + +struct TestVarContainer { + test_var: i32, +} + +impl TestVarContainer { + fn test_var(&self) -> i32 { + self.test_var + } +} + +struct Test {} + +impl Test { + fn map(self, f: i32) -> Self { + self + } + + fn filter(self, predicate: i32) -> Self { + self + } + + fn field(self, value: i32) -> Self { + self + } + + fn no_hints_expected(&self, _: i32, test_var: i32) {} + + fn frob(&self, frob: bool) {} +} + +struct Param {} + +fn different_order(param: &Param) {} +fn different_order_mut(param: &mut Param) {} +fn has_underscore(_param: bool) {} +fn enum_matches_param_name(completion_kind: CompletionKind) {} +fn param_destructuring_omitted_1((a, b): (u32, u32)) {} +fn param_destructuring_omitted_2(TestVarContainer { test_var: _ }: TestVarContainer) {} + +fn twiddle(twiddle: bool) {} +fn doo(_doo: bool) {} + +enum CompletionKind { + Keyword, +} + +fn main() { + let container: TestVarContainer = TestVarContainer { test_var: 42 }; + let test: Test = Test {}; + + map(22); + filter(33); + + let test_processed: Test = test.map(1).filter(2).field(3); + + let test_var: i32 = 55; + test_processed.no_hints_expected(22, test_var); + test_processed.no_hints_expected(33, container.test_var); + test_processed.no_hints_expected(44, container.test_var()); + test_processed.frob(false); + + twiddle(true); + doo(true); + + let mut param_begin: Param = Param {}; + different_order(¶m_begin); + different_order(&mut param_begin); + + let param: bool = true; + has_underscore(param); + + enum_matches_param_name(CompletionKind::Keyword); + + let a: f64 = 7.0; + let b: f64 = 4.0; + let _: f64 = a.div_euclid(b); + let _: f64 = a.abs_sub(b); + + let range: 
(u32, u32) = (3, 5); + param_destructuring_omitted_1(range); + param_destructuring_omitted_2(container); +}"#, + ); + } + + #[test] + fn unit_structs_have_no_type_hints() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..Default::default() }, + r#" +enum Result { Ok(T), Err(E) } +use Result::*; + +struct SyntheticSyntax; + +fn main() { + match Ok(()) { + Ok(_) => (), + Err(SyntheticSyntax) => (), + } +}"#, + ); + } + + #[test] + fn chaining_hints_ignore_comments() { + check_expect( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)) + .into_b() // This is a comment + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 147..172, + kind: ChainingHint, + label: "B", + }, + InlayHint { + range: 147..154, + kind: ChainingHint, + label: "A", + }, + ] + "#]], + ); + } + + #[test] + fn chaining_hints_without_newlines() { + check_with_config( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)).into_b().into_c(); +}"#, + ); + } + + #[test] + fn struct_access_chaining_hints() { + check_expect( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A { pub b: B } +struct B { pub c: C } +struct C(pub bool); +struct D; + +impl D { + fn foo(&self) -> i32 { 42 } +} + +fn main() { + let x = A { b: B { c: C(true) } } + .b + .c + .0; + let x = D + .foo(); +}"#, + expect![[r#" + [ + InlayHint { + range: 143..190, + kind: ChainingHint, + label: "C", + }, + InlayHint { + range: 143..179, + kind: ChainingHint, + label: "B", + }, + ] + 
"#]], + ); + } + + #[test] + fn generic_chaining_hints() { + check_expect( + InlayHintsConfig { + parameter_hints: false, + type_hints: false, + chaining_hints: true, + max_length: None, + }, + r#" +struct A(T); +struct B(T); +struct C(T); +struct X(T, R); + +impl A { + fn new(t: T) -> Self { A(t) } + fn into_b(self) -> B { B(self.0) } +} +impl B { + fn into_c(self) -> C { C(self.0) } +} +fn main() { + let c = A::new(X(42, true)) + .into_b() + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 246..283, + kind: ChainingHint, + label: "B>", + }, + InlayHint { + range: 246..265, + kind: ChainingHint, + label: "A>", + }, + ] + "#]], + ); + } +} diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs new file mode 100644 index 000000000..e37702acd --- /dev/null +++ b/crates/ide/src/join_lines.rs @@ -0,0 +1,773 @@ +use assists::utils::extract_trivial_expression; +use itertools::Itertools; +use syntax::{ + algo::{find_covering_element, non_trivia_sibling}, + ast::{self, AstNode, AstToken}, + Direction, NodeOrToken, SourceFile, + SyntaxKind::{self, USE_TREE, WHITESPACE}, + SyntaxNode, SyntaxToken, TextRange, TextSize, T, +}; +use text_edit::{TextEdit, TextEditBuilder}; + +// Feature: Join Lines +// +// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Join lines** +// |=== +pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { + let range = if range.is_empty() { + let syntax = file.syntax(); + let text = syntax.text().slice(range.start()..); + let pos = match text.find_char('\n') { + None => return TextEdit::builder().finish(), + Some(pos) => pos, + }; + TextRange::at(range.start() + pos, TextSize::of('\n')) + } else { + range + }; + + let node = match find_covering_element(file.syntax(), range) { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), + }; + let mut edit = TextEdit::builder(); + for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) { + let range = match range.intersect(token.text_range()) { + Some(range) => range, + None => continue, + } - token.text_range().start(); + let text = token.text(); + for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { + let pos: TextSize = (pos as u32).into(); + let off = token.text_range().start() + range.start() + pos; + if !edit.invalidates_offset(off) { + remove_newline(&mut edit, &token, off); + } + } + } + + edit.finish() +} + +fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) { + if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { + // The node is either the first or the last in the file + let suff = &token.text()[TextRange::new( + offset - token.text_range().start() + TextSize::of('\n'), + TextSize::of(token.text().as_str()), + )]; + let spaces = suff.bytes().take_while(|&b| b == b' ').count(); + + edit.replace(TextRange::at(offset, ((spaces + 1) as u32).into()), " ".to_string()); + return; + } + + // The node is between two other nodes + let prev = token.prev_sibling_or_token().unwrap(); + let next = token.next_sibling_or_token().unwrap(); + if is_trailing_comma(prev.kind(), next.kind()) { + // Removes: trailing 
comma, newline (incl. surrounding whitespace) + edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end())); + return; + } + if prev.kind() == T![,] && next.kind() == T!['}'] { + // Removes: comma, newline (incl. surrounding whitespace) + let space = if let Some(left) = prev.prev_sibling_or_token() { + compute_ws(left.kind(), next.kind()) + } else { + " " + }; + edit.replace( + TextRange::new(prev.text_range().start(), token.text_range().end()), + space.to_string(), + ); + return; + } + + if let (Some(_), Some(next)) = ( + prev.as_token().cloned().and_then(ast::Comment::cast), + next.as_token().cloned().and_then(ast::Comment::cast), + ) { + // Removes: newline (incl. surrounding whitespace), start of the next comment + edit.delete(TextRange::new( + token.text_range().start(), + next.syntax().text_range().start() + TextSize::of(next.prefix()), + )); + return; + } + + // Special case that turns something like: + // + // ``` + // my_function({<|> + // + // }) + // ``` + // + // into `my_function()` + if join_single_expr_block(edit, token).is_some() { + return; + } + // ditto for + // + // ``` + // use foo::{<|> + // bar + // }; + // ``` + if join_single_use_tree(edit, token).is_some() { + return; + } + + // Remove newline but add a computed amount of whitespace characters + edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string()); +} + +fn has_comma_after(node: &SyntaxNode) -> bool { + match non_trivia_sibling(node.clone().into(), Direction::Next) { + Some(n) => n.kind() == T![,], + _ => false, + } +} + +fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { + let block_expr = ast::BlockExpr::cast(token.parent())?; + if !block_expr.is_standalone() { + return None; + } + let expr = extract_trivial_expression(&block_expr)?; + + let block_range = block_expr.syntax().text_range(); + let mut buf = expr.syntax().text().to_string(); + + // Match block needs to have a comma after the block + if 
let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) { + if !has_comma_after(match_arm.syntax()) { + buf.push(','); + } + } + + edit.replace(block_range, buf); + + Some(()) +} + +fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(token.parent())?; + let (tree,) = use_tree_list.use_trees().collect_tuple()?; + edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string()); + Some(()) +} + +fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { + matches!((left, right), (T![,], T![')']) | (T![,], T![']'])) +} + +fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { + match left { + T!['('] | T!['['] => return "", + T!['{'] => { + if let USE_TREE = right { + return ""; + } + } + _ => (), + } + match right { + T![')'] | T![']'] => return "", + T!['}'] => { + if let USE_TREE = left { + return ""; + } + } + T![.] => return "", + _ => (), + } + " " +} + +#[cfg(test)] +mod tests { + use syntax::SourceFile; + use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; + + use super::*; + + fn check_join_lines(before: &str, after: &str) { + let (before_cursor_pos, before) = extract_offset(before); + let file = SourceFile::parse(&before).ok().unwrap(); + + let range = TextRange::empty(before_cursor_pos); + let result = join_lines(&file, range); + + let actual = { + let mut actual = before.to_string(); + result.apply(&mut actual); + actual + }; + let actual_cursor_pos = result + .apply_to_offset(before_cursor_pos) + .expect("cursor position is affected by the edit"); + let actual = add_cursor(&actual, actual_cursor_pos); + assert_eq_text!(after, &actual); + } + + #[test] + fn test_join_lines_comma() { + check_join_lines( + r" +fn foo() { + <|>foo(1, + ) +} +", + r" +fn foo() { + <|>foo(1) +} +", + ); + } + + #[test] + fn test_join_lines_lambda_block() { + check_join_lines( + r" +pub fn reparse(&self, 
edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| { + self.full_reparse(edit) + }) +} +", + r" +pub fn reparse(&self, edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) +} +", + ); + } + + #[test] + fn test_join_lines_block() { + check_join_lines( + r" +fn foo() { + foo(<|>{ + 92 + }) +}", + r" +fn foo() { + foo(<|>92) +}", + ); + } + + #[test] + fn test_join_lines_diverging_block() { + let before = r" + fn foo() { + loop { + match x { + 92 => <|>{ + continue; + } + } + } + } + "; + let after = r" + fn foo() { + loop { + match x { + 92 => <|>continue, + } + } + } + "; + check_join_lines(before, after); + } + + #[test] + fn join_lines_adds_comma_for_block_in_match_arm() { + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo(), + Err(v) => v, + } +}", + ); + } + + #[test] + fn join_lines_multiline_in_block() { + check_join_lines( + r" +fn foo() { + match ty { + <|> Some(ty) => { + match ty { + _ => false, + } + } + _ => true, + } +} +", + r" +fn foo() { + match ty { + <|> Some(ty) => match ty { + _ => false, + }, + _ => true, + } +} +", + ); + } + + #[test] + fn join_lines_keeps_comma_for_block_in_match_arm() { + // We already have a comma + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + }, + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo(), + Err(v) => v, + } +}", + ); + + // comma with whitespace between brace and , + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } , + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo() , + Err(v) => v, + } +}", + ); + + // comma with newline between brace and , + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } + , + Err(v) => v, + } +}", + r" +fn 
foo(e: Result) { + match e { + Ok(u) => <|>u.foo() + , + Err(v) => v, + } +}", + ); + } + + #[test] + fn join_lines_keeps_comma_with_single_arg_tuple() { + // A single arg tuple + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + },); +}", + r" +fn foo() { + let x = (<|>4,); +}", + ); + + // single arg tuple with whitespace between brace and comma + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + } ,); +}", + r" +fn foo() { + let x = (<|>4 ,); +}", + ); + + // single arg tuple with newline between brace and comma + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + } + ,); +}", + r" +fn foo() { + let x = (<|>4 + ,); +}", + ); + } + + #[test] + fn test_join_lines_use_items_left() { + // No space after the '{' + check_join_lines( + r" +<|>use syntax::{ + TextSize, TextRange, +};", + r" +<|>use syntax::{TextSize, TextRange, +};", + ); + } + + #[test] + fn test_join_lines_use_items_right() { + // No space after the '}' + check_join_lines( + r" +use syntax::{ +<|> TextSize, TextRange +};", + r" +use syntax::{ +<|> TextSize, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_items_right_comma() { + // No space after the '}' + check_join_lines( + r" +use syntax::{ +<|> TextSize, TextRange, +};", + r" +use syntax::{ +<|> TextSize, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_tree() { + check_join_lines( + r" +use syntax::{ + algo::<|>{ + find_token_at_offset, + }, + ast, +};", + r" +use syntax::{ + algo::<|>find_token_at_offset, + ast, +};", + ); + } + + #[test] + fn test_join_lines_normal_comments() { + check_join_lines( + r" +fn foo() { + // Hello<|> + // world! +} +", + r" +fn foo() { + // Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_doc_comments() { + check_join_lines( + r" +fn foo() { + /// Hello<|> + /// world! +} +", + r" +fn foo() { + /// Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_mod_comments() { + check_join_lines( + r" +fn foo() { + //! Hello<|> + //! world! 
+} +", + r" +fn foo() { + //! Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_1() { + check_join_lines( + r" +fn foo() { + // Hello<|> + /* world! */ +} +", + r" +fn foo() { + // Hello<|> world! */ +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_2() { + check_join_lines( + r" +fn foo() { + // The<|> + /* quick + brown + fox! */ +} +", + r" +fn foo() { + // The<|> quick + brown + fox! */ +} +", + ); + } + + fn check_join_lines_sel(before: &str, after: &str) { + let (sel, before) = extract_range(before); + let parse = SourceFile::parse(&before); + let result = join_lines(&parse.tree(), sel); + let actual = { + let mut actual = before.to_string(); + result.apply(&mut actual); + actual + }; + assert_eq_text!(after, &actual); + } + + #[test] + fn test_join_lines_selection_fn_args() { + check_join_lines_sel( + r" +fn foo() { + <|>foo(1, + 2, + 3, + <|>) +} + ", + r" +fn foo() { + foo(1, 2, 3) +} + ", + ); + } + + #[test] + fn test_join_lines_selection_struct() { + check_join_lines_sel( + r" +struct Foo <|>{ + f: u32, +}<|> + ", + r" +struct Foo { f: u32 } + ", + ); + } + + #[test] + fn test_join_lines_selection_dot_chain() { + check_join_lines_sel( + r" +fn foo() { + join(<|>type_params.type_params() + .filter_map(|it| it.name()) + .map(|it| it.text())<|>) +}", + r" +fn foo() { + join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) +}", + ); + } + + #[test] + fn test_join_lines_selection_lambda_block_body() { + check_join_lines_sel( + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| <|>{ + world.analysis().matching_brace(&file, offset).unwrap_or(offset) + }<|>) + .collect(); +}", + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) + .collect(); +}", + ); + } + + #[test] + fn test_join_lines_commented_block() { + check_join_lines( + r" +fn main() { + let _ = { + // <|>foo + // 
bar + 92 + }; +} + ", + r" +fn main() { + let _ = { + // <|>foo bar + 92 + }; +} + ", + ) + } + + #[test] + fn join_lines_mandatory_blocks_block() { + check_join_lines( + r" +<|>fn foo() { + 92 +} + ", + r" +<|>fn foo() { 92 +} + ", + ); + + check_join_lines( + r" +fn foo() { + <|>if true { + 92 + } +} + ", + r" +fn foo() { + <|>if true { 92 + } +} + ", + ); + + check_join_lines( + r" +fn foo() { + <|>loop { + 92 + } +} + ", + r" +fn foo() { + <|>loop { 92 + } +} + ", + ); + + check_join_lines( + r" +fn foo() { + <|>unsafe { + 92 + } +} + ", + r" +fn foo() { + <|>unsafe { 92 + } +} + ", + ); + } +} diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs new file mode 100644 index 000000000..eb6389529 --- /dev/null +++ b/crates/ide/src/lib.rs @@ -0,0 +1,542 @@ +//! ide crate provides "ide-centric" APIs for the rust-analyzer. That is, +//! it generally operates with files and text ranges, and returns results as +//! Strings, suitable for displaying to the human. +//! +//! What powers this API are the `RootDatabase` struct, which defines a `salsa` +//! database, and the `hir` crate, where majority of the analysis happens. +//! However, IDE specific bits of the analysis (most notably completion) happen +//! in this crate. + +// For proving that RootDatabase is RefUnwindSafe. +#![recursion_limit = "128"] + +#[allow(unused)] +macro_rules! 
eprintln { + ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; +} + +pub mod mock_analysis; + +mod markup; +mod prime_caches; +mod display; + +mod call_hierarchy; +mod call_info; +mod completion; +mod diagnostics; +mod expand_macro; +mod extend_selection; +mod file_structure; +mod folding_ranges; +mod goto_definition; +mod goto_implementation; +mod goto_type_definition; +mod hover; +mod inlay_hints; +mod join_lines; +mod matching_brace; +mod parent_module; +mod references; +mod runnables; +mod status; +mod syntax_highlighting; +mod syntax_tree; +mod typing; + +use std::sync::Arc; + +use base_db::{ + salsa::{self, ParallelDatabase}, + CheckCanceled, Env, FileLoader, FileSet, SourceDatabase, VfsPath, +}; +use cfg::CfgOptions; +use ide_db::{ + symbol_index::{self, FileSymbol}, + LineIndexDatabase, +}; +use syntax::{SourceFile, TextRange, TextSize}; + +use crate::display::ToNav; + +pub use crate::{ + call_hierarchy::CallItem, + call_info::CallInfo, + completion::{ + CompletionConfig, CompletionItem, CompletionItemKind, CompletionScore, InsertTextFormat, + }, + diagnostics::Severity, + display::NavigationTarget, + expand_macro::ExpandedMacro, + file_structure::StructureNode, + folding_ranges::{Fold, FoldKind}, + hover::{HoverAction, HoverConfig, HoverGotoTypeData, HoverResult}, + inlay_hints::{InlayHint, InlayHintsConfig, InlayKind}, + markup::Markup, + references::{Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult}, + runnables::{Runnable, RunnableKind, TestId}, + syntax_highlighting::{ + Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange, + }, +}; + +pub use assists::{Assist, AssistConfig, AssistId, AssistKind, ResolvedAssist}; +pub use base_db::{ + Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, + SourceRootId, +}; +pub use hir::{Documentation, Semantics}; +pub use ide_db::{ + change::AnalysisChange, + line_index::{LineCol, LineIndex}, + search::SearchScope, + 
source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, + symbol_index::Query, + RootDatabase, +}; +pub use ssr::SsrError; +pub use text_edit::{Indel, TextEdit}; + +pub type Cancelable = Result; + +#[derive(Debug)] +pub struct Diagnostic { + pub message: String, + pub range: TextRange, + pub severity: Severity, + pub fix: Option, +} + +#[derive(Debug)] +pub struct Fix { + pub label: String, + pub source_change: SourceChange, + /// Allows to trigger the fix only when the caret is in the range given + pub fix_trigger_range: TextRange, +} + +impl Fix { + pub fn new( + label: impl Into, + source_change: SourceChange, + fix_trigger_range: TextRange, + ) -> Self { + let label = label.into(); + assert!(label.starts_with(char::is_uppercase) && !label.ends_with('.')); + Self { label, source_change, fix_trigger_range } + } +} + +/// Info associated with a text range. +#[derive(Debug)] +pub struct RangeInfo { + pub range: TextRange, + pub info: T, +} + +impl RangeInfo { + pub fn new(range: TextRange, info: T) -> RangeInfo { + RangeInfo { range, info } + } +} + +/// `AnalysisHost` stores the current state of the world. +#[derive(Debug)] +pub struct AnalysisHost { + db: RootDatabase, +} + +impl AnalysisHost { + pub fn new(lru_capacity: Option) -> AnalysisHost { + AnalysisHost { db: RootDatabase::new(lru_capacity) } + } + + pub fn update_lru_capacity(&mut self, lru_capacity: Option) { + self.db.update_lru_capacity(lru_capacity); + } + + /// Returns a snapshot of the current state, which you can query for + /// semantic information. + pub fn analysis(&self) -> Analysis { + Analysis { db: self.db.snapshot() } + } + + /// Applies changes to the current state of the world. If there are + /// outstanding snapshots, they will be canceled. 
+ pub fn apply_change(&mut self, change: AnalysisChange) { + self.db.apply_change(change) + } + + pub fn maybe_collect_garbage(&mut self) { + self.db.maybe_collect_garbage(); + } + + pub fn collect_garbage(&mut self) { + self.db.collect_garbage(); + } + /// NB: this clears the database + pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> { + self.db.per_query_memory_usage() + } + pub fn request_cancellation(&mut self) { + self.db.request_cancellation(); + } + pub fn raw_database(&self) -> &RootDatabase { + &self.db + } + pub fn raw_database_mut(&mut self) -> &mut RootDatabase { + &mut self.db + } +} + +impl Default for AnalysisHost { + fn default() -> AnalysisHost { + AnalysisHost::new(None) + } +} + +/// Analysis is a snapshot of a world state at a moment in time. It is the main +/// entry point for asking semantic information about the world. When the world +/// state is advanced using `AnalysisHost::apply_change` method, all existing +/// `Analysis` are canceled (most method return `Err(Canceled)`). +#[derive(Debug)] +pub struct Analysis { + db: salsa::Snapshot, +} + +// As a general design guideline, `Analysis` API are intended to be independent +// from the language server protocol. That is, when exposing some functionality +// we should think in terms of "what API makes most sense" and not in terms of +// "what types LSP uses". Although currently LSP is the only consumer of the +// API, the API should in theory be usable as a library, or via a different +// protocol. +impl Analysis { + // Creates an analysis instance for a single file, without any extenal + // dependencies, stdlib support or ability to apply changes. See + // `AnalysisHost` for creating a fully-featured analysis. 
+ pub fn from_single_file(text: String) -> (Analysis, FileId) { + let mut host = AnalysisHost::default(); + let file_id = FileId(0); + let mut file_set = FileSet::default(); + file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); + let source_root = SourceRoot::new_local(file_set); + + let mut change = AnalysisChange::new(); + change.set_roots(vec![source_root]); + let mut crate_graph = CrateGraph::default(); + // FIXME: cfg options + // Default to enable test for single file. + let mut cfg_options = CfgOptions::default(); + cfg_options.insert_atom("test".into()); + crate_graph.add_crate_root( + file_id, + Edition::Edition2018, + None, + cfg_options, + Env::default(), + Default::default(), + ); + change.change_file(file_id, Some(Arc::new(text))); + change.set_crate_graph(crate_graph); + host.apply_change(change); + (host.analysis(), file_id) + } + + /// Debug info about the current state of the analysis. + pub fn status(&self) -> Cancelable { + self.with_db(|db| status::status(&*db)) + } + + pub fn prime_caches(&self, files: Vec) -> Cancelable<()> { + self.with_db(|db| prime_caches::prime_caches(db, files)) + } + + /// Gets the text of the source file. + pub fn file_text(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| db.file_text(file_id)) + } + + /// Gets the syntax tree of the file. + pub fn parse(&self, file_id: FileId) -> Cancelable { + self.with_db(|db| db.parse(file_id).tree()) + } + + /// Gets the file's `LineIndex`: data structure to convert between absolute + /// offsets and line/column representation. + pub fn file_line_index(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| db.line_index(file_id)) + } + + /// Selects the next syntactic nodes encompassing the range. + pub fn extend_selection(&self, frange: FileRange) -> Cancelable { + self.with_db(|db| extend_selection::extend_selection(db, frange)) + } + + /// Returns position of the matching brace (all types of braces are + /// supported). 
+ pub fn matching_brace(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| { + let parse = db.parse(position.file_id); + let file = parse.tree(); + matching_brace::matching_brace(&file, position.offset) + }) + } + + /// Returns a syntax tree represented as `String`, for debug purposes. + // FIXME: use a better name here. + pub fn syntax_tree( + &self, + file_id: FileId, + text_range: Option, + ) -> Cancelable { + self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range)) + } + + pub fn expand_macro(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| expand_macro::expand_macro(db, position)) + } + + /// Returns an edit to remove all newlines in the range, cleaning up minor + /// stuff like trailing commas. + pub fn join_lines(&self, frange: FileRange) -> Cancelable { + self.with_db(|db| { + let parse = db.parse(frange.file_id); + join_lines::join_lines(&parse.tree(), frange.range) + }) + } + + /// Returns an edit which should be applied when opening a new line, fixing + /// up minor stuff like continuing the comment. + /// The edit will be a snippet (with `$0`). + pub fn on_enter(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| typing::on_enter(&db, position)) + } + + /// Returns an edit which should be applied after a character was typed. + /// + /// This is useful for some on-the-fly fixups, like adding `;` to `let =` + /// automatically. + pub fn on_char_typed( + &self, + position: FilePosition, + char_typed: char, + ) -> Cancelable> { + // Fast path to not even parse the file. + if !typing::TRIGGER_CHARS.contains(char_typed) { + return Ok(None); + } + self.with_db(|db| typing::on_char_typed(&db, position, char_typed)) + } + + /// Returns a tree representation of symbols in the file. Useful to draw a + /// file outline. 
+ pub fn file_structure(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree())) + } + + /// Returns a list of the places in the file where type hints can be displayed. + pub fn inlay_hints( + &self, + file_id: FileId, + config: &InlayHintsConfig, + ) -> Cancelable> { + self.with_db(|db| inlay_hints::inlay_hints(db, file_id, config)) + } + + /// Returns the set of folding ranges. + pub fn folding_ranges(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree())) + } + + /// Fuzzy searches for a symbol. + pub fn symbol_search(&self, query: Query) -> Cancelable> { + self.with_db(|db| { + symbol_index::world_symbols(db, query) + .into_iter() + .map(|s| s.to_nav(db)) + .collect::>() + }) + } + + /// Returns the definitions from the symbol at `position`. + pub fn goto_definition( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_definition::goto_definition(db, position)) + } + + /// Returns the impls from the symbol at `position`. + pub fn goto_implementation( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_implementation::goto_implementation(db, position)) + } + + /// Returns the type definitions for the symbol at `position`. + pub fn goto_type_definition( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_type_definition::goto_type_definition(db, position)) + } + + /// Finds all usages of the reference at point. + pub fn find_all_refs( + &self, + position: FilePosition, + search_scope: Option, + ) -> Cancelable> { + self.with_db(|db| { + references::find_all_refs(&Semantics::new(db), position, search_scope).map(|it| it.info) + }) + } + + /// Returns a short text describing element at position. 
+ pub fn hover(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| hover::hover(db, position)) + } + + /// Computes parameter information for the given call expression. + pub fn call_info(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| call_info::call_info(db, position)) + } + + /// Computes call hierarchy candidates for the given file position. + pub fn call_hierarchy( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| call_hierarchy::call_hierarchy(db, position)) + } + + /// Computes incoming calls for the given file position. + pub fn incoming_calls(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| call_hierarchy::incoming_calls(db, position)) + } + + /// Computes incoming calls for the given file position. + pub fn outgoing_calls(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| call_hierarchy::outgoing_calls(db, position)) + } + + /// Returns a `mod name;` declaration which created the current module. + pub fn parent_module(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| parent_module::parent_module(db, position)) + } + + /// Returns crates this file belongs too. + pub fn crate_for(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| parent_module::crate_for(db, file_id)) + } + + /// Returns the edition of the given crate. + pub fn crate_edition(&self, crate_id: CrateId) -> Cancelable { + self.with_db(|db| db.crate_graph()[crate_id].edition) + } + + /// Returns the root file of the given crate. + pub fn crate_root(&self, crate_id: CrateId) -> Cancelable { + self.with_db(|db| db.crate_graph()[crate_id].root_file_id) + } + + /// Returns the set of possible targets to run for the current file. 
+ pub fn runnables(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| runnables::runnables(db, file_id)) + } + + /// Computes syntax highlighting for the given file + pub fn highlight(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false)) + } + + /// Computes syntax highlighting for the given file range. + pub fn highlight_range(&self, frange: FileRange) -> Cancelable> { + self.with_db(|db| { + syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false) + }) + } + + /// Computes syntax highlighting for the given file. + pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable { + self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) + } + + /// Computes completions at the given position. + pub fn completions( + &self, + config: &CompletionConfig, + position: FilePosition, + ) -> Cancelable>> { + self.with_db(|db| completion::completions(db, config, position).map(Into::into)) + } + + /// Computes resolved assists with source changes for the given position. + pub fn resolved_assists( + &self, + config: &AssistConfig, + frange: FileRange, + ) -> Cancelable> { + self.with_db(|db| assists::Assist::resolved(db, config, frange)) + } + + /// Computes unresolved assists (aka code actions aka intentions) for the given + /// position. + pub fn unresolved_assists( + &self, + config: &AssistConfig, + frange: FileRange, + ) -> Cancelable> { + self.with_db(|db| Assist::unresolved(db, config, frange)) + } + + /// Computes the set of diagnostics for the given file. + pub fn diagnostics( + &self, + file_id: FileId, + enable_experimental: bool, + ) -> Cancelable> { + self.with_db(|db| diagnostics::diagnostics(db, file_id, enable_experimental)) + } + + /// Returns the edit required to rename reference at the position to the new + /// name. 
+ pub fn rename( + &self, + position: FilePosition, + new_name: &str, + ) -> Cancelable>> { + self.with_db(|db| references::rename(db, position, new_name)) + } + + pub fn structural_search_replace( + &self, + query: &str, + parse_only: bool, + resolve_context: FilePosition, + selections: Vec, + ) -> Cancelable> { + self.with_db(|db| { + let rule: ssr::SsrRule = query.parse()?; + let mut match_finder = ssr::MatchFinder::in_context(db, resolve_context, selections); + match_finder.add_rule(rule)?; + let edits = if parse_only { Vec::new() } else { match_finder.edits() }; + Ok(SourceChange::from(edits)) + }) + } + + /// Performs an operation on that may be Canceled. + fn with_db(&self, f: F) -> Cancelable + where + F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, + { + self.db.catch_canceled(f) + } +} + +#[test] +fn analysis_is_send() { + fn is_send() {} + is_send::(); +} diff --git a/crates/ide/src/markup.rs b/crates/ide/src/markup.rs new file mode 100644 index 000000000..60c193c40 --- /dev/null +++ b/crates/ide/src/markup.rs @@ -0,0 +1,38 @@ +//! Markdown formatting. +//! +//! Sometimes, we want to display a "rich text" in the UI. At the moment, we use +//! markdown for this purpose. It doesn't feel like a right option, but that's +//! what is used by LSP, so let's keep it simple. 
+use std::fmt; + +#[derive(Default, Debug)] +pub struct Markup { + text: String, +} + +impl From for String { + fn from(markup: Markup) -> Self { + markup.text + } +} + +impl From for Markup { + fn from(text: String) -> Self { + Markup { text } + } +} + +impl fmt::Display for Markup { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.text, f) + } +} + +impl Markup { + pub fn as_str(&self) -> &str { + self.text.as_str() + } + pub fn fenced_block(contents: &impl fmt::Display) -> Markup { + format!("```rust\n{}\n```", contents).into() + } +} diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs new file mode 100644 index 000000000..cb6abb0db --- /dev/null +++ b/crates/ide/src/matching_brace.rs @@ -0,0 +1,73 @@ +use syntax::{ + ast::{self, AstNode}, + SourceFile, SyntaxKind, TextSize, T, +}; +use test_utils::mark; + +// Feature: Matching Brace +// +// If the cursor is on any brace (`<>(){}[]||`) which is a part of a brace-pair, +// moves cursor to the matching brace. It uses the actual parser to determine +// braces, so it won't confuse generics with comparisons. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Find matching brace** +// |=== +pub fn matching_brace(file: &SourceFile, offset: TextSize) -> Option { + const BRACES: &[SyntaxKind] = + &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]]; + let (brace_token, brace_idx) = file + .syntax() + .token_at_offset(offset) + .filter_map(|node| { + let idx = BRACES.iter().position(|&brace| brace == node.kind())?; + Some((node, idx)) + }) + .next()?; + let parent = brace_token.parent(); + if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) { + mark::hit!(pipes_not_braces); + return None; + } + let matching_kind = BRACES[brace_idx ^ 1]; + let matching_node = parent + .children_with_tokens() + .filter_map(|it| it.into_token()) + .find(|node| node.kind() == matching_kind && node != &brace_token)?; + Some(matching_node.text_range().start()) +} + +#[cfg(test)] +mod tests { + use test_utils::{add_cursor, assert_eq_text, extract_offset}; + + use super::*; + + #[test] + fn test_matching_brace() { + fn do_check(before: &str, after: &str) { + let (pos, before) = extract_offset(before); + let parse = SourceFile::parse(&before); + let new_pos = match matching_brace(&parse.tree(), pos) { + None => pos, + Some(pos) => pos, + }; + let actual = add_cursor(&before, new_pos); + assert_eq_text!(after, &actual); + } + + do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); + do_check("fn main() { |x: i32|<|> x * 2;}", "fn main() { <|>|x: i32| x * 2;}"); + do_check("fn main() { <|>|x: i32| x * 2;}", "fn main() { |x: i32<|>| x * 2;}"); + + { + mark::check!(pipes_not_braces); + do_check( + "fn main() { match 92 { 1 | 2 |<|> 3 => 92 } }", + "fn main() { match 92 { 1 | 2 |<|> 3 => 92 } }", + ); + } + } +} diff --git a/crates/ide/src/mock_analysis.rs b/crates/ide/src/mock_analysis.rs new file mode 100644 index 000000000..363e6d27e --- /dev/null +++ b/crates/ide/src/mock_analysis.rs @@ -0,0 +1,176 @@ +//! 
FIXME: write short doc here +use std::sync::Arc; + +use base_db::{CrateName, FileSet, SourceRoot, VfsPath}; +use cfg::CfgOptions; +use test_utils::{ + extract_annotations, extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, +}; + +use crate::{ + Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition, FileId, FilePosition, FileRange, +}; + +/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis +/// from a set of in-memory files. +#[derive(Debug, Default)] +pub struct MockAnalysis { + files: Vec, +} + +impl MockAnalysis { + /// Creates `MockAnalysis` using a fixture data in the following format: + /// + /// ```not_rust + /// //- /main.rs + /// mod foo; + /// fn main() {} + /// + /// //- /foo.rs + /// struct Baz; + /// ``` + pub fn with_files(ra_fixture: &str) -> MockAnalysis { + let (res, pos) = MockAnalysis::with_fixture(ra_fixture); + assert!(pos.is_none()); + res + } + + /// Same as `with_files`, but requires that a single file contains a `<|>` marker, + /// whose position is also returned. 
+ pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { + let (res, position) = MockAnalysis::with_fixture(fixture); + let (file_id, range_or_offset) = position.expect("expected a marker (<|>)"); + let offset = match range_or_offset { + RangeOrOffset::Range(_) => panic!(), + RangeOrOffset::Offset(it) => it, + }; + (res, FilePosition { file_id, offset }) + } + + fn with_fixture(fixture: &str) -> (MockAnalysis, Option<(FileId, RangeOrOffset)>) { + let mut position = None; + let mut res = MockAnalysis::default(); + for mut entry in Fixture::parse(fixture) { + if entry.text.contains(CURSOR_MARKER) { + assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); + let (range_or_offset, text) = extract_range_or_offset(&entry.text); + entry.text = text; + let file_id = res.add_file_fixture(entry); + position = Some((file_id, range_or_offset)); + } else { + res.add_file_fixture(entry); + } + } + (res, position) + } + + fn add_file_fixture(&mut self, fixture: Fixture) -> FileId { + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push(fixture); + file_id + } + + pub fn id_of(&self, path: &str) -> FileId { + let (file_id, _) = + self.files().find(|(_, data)| path == data.path).expect("no file in this mock"); + file_id + } + pub fn annotations(&self) -> Vec<(FileRange, String)> { + self.files() + .flat_map(|(file_id, fixture)| { + let annotations = extract_annotations(&fixture.text); + annotations + .into_iter() + .map(move |(range, data)| (FileRange { file_id, range }, data)) + }) + .collect() + } + pub fn files(&self) -> impl Iterator + '_ { + self.files.iter().enumerate().map(|(idx, fixture)| (FileId(idx as u32 + 1), fixture)) + } + pub fn annotation(&self) -> (FileRange, String) { + let mut all = self.annotations(); + assert_eq!(all.len(), 1); + all.pop().unwrap() + } + pub fn analysis_host(self) -> AnalysisHost { + let mut host = AnalysisHost::default(); + let mut change = AnalysisChange::new(); + let mut 
file_set = FileSet::default(); + let mut crate_graph = CrateGraph::default(); + let mut root_crate = None; + for (i, data) in self.files.into_iter().enumerate() { + let path = data.path; + assert!(path.starts_with('/')); + + let mut cfg = CfgOptions::default(); + data.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); + data.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); + let edition: Edition = + data.edition.and_then(|it| it.parse().ok()).unwrap_or(Edition::Edition2018); + + let file_id = FileId(i as u32 + 1); + let env = data.env.into_iter().collect(); + if path == "/lib.rs" || path == "/main.rs" { + root_crate = Some(crate_graph.add_crate_root( + file_id, + edition, + None, + cfg, + env, + Default::default(), + )); + } else if path.ends_with("/lib.rs") { + let base = &path[..path.len() - "/lib.rs".len()]; + let crate_name = &base[base.rfind('/').unwrap() + '/'.len_utf8()..]; + let other_crate = crate_graph.add_crate_root( + file_id, + edition, + Some(crate_name.to_string()), + cfg, + env, + Default::default(), + ); + if let Some(root_crate) = root_crate { + crate_graph + .add_dep(root_crate, CrateName::new(crate_name).unwrap(), other_crate) + .unwrap(); + } + } + let path = VfsPath::new_virtual_path(path.to_string()); + file_set.insert(file_id, path); + change.change_file(file_id, Some(Arc::new(data.text).to_owned())); + } + change.set_crate_graph(crate_graph); + change.set_roots(vec![SourceRoot::new_local(file_set)]); + host.apply_change(change); + host + } + pub fn analysis(self) -> Analysis { + self.analysis_host().analysis() + } +} + +/// Creates analysis from a multi-file fixture, returns positions marked with <|>. +pub fn analysis_and_position(ra_fixture: &str) -> (Analysis, FilePosition) { + let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture); + (mock.analysis(), position) +} + +/// Creates analysis for a single file. 
+pub fn single_file(ra_fixture: &str) -> (Analysis, FileId) { + let mock = MockAnalysis::with_files(ra_fixture); + let file_id = mock.id_of("/main.rs"); + (mock.analysis(), file_id) +} + +/// Creates analysis for a single file, returns range marked with a pair of <|>. +pub fn analysis_and_range(ra_fixture: &str) -> (Analysis, FileRange) { + let (res, position) = MockAnalysis::with_fixture(ra_fixture); + let (file_id, range_or_offset) = position.expect("expected a marker (<|>)"); + let range = match range_or_offset { + RangeOrOffset::Range(it) => it, + RangeOrOffset::Offset(_) => panic!(), + }; + (res.analysis(), FileRange { file_id, range }) +} diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs new file mode 100644 index 000000000..59ed2967c --- /dev/null +++ b/crates/ide/src/parent_module.rs @@ -0,0 +1,155 @@ +use base_db::{CrateId, FileId, FilePosition}; +use hir::Semantics; +use ide_db::RootDatabase; +use syntax::{ + algo::find_node_at_offset, + ast::{self, AstNode}, +}; +use test_utils::mark; + +use crate::NavigationTarget; + +// Feature: Parent Module +// +// Navigates to the parent module of the current module. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Locate parent module** +// |=== + +/// This returns `Vec` because a module may be included from several places. We +/// don't handle this case yet though, so the Vec has length at most one. +pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec { + let sema = Semantics::new(db); + let source_file = sema.parse(position.file_id); + + let mut module = find_node_at_offset::(source_file.syntax(), position.offset); + + // If cursor is literally on `mod foo`, go to the grandpa. 
+ if let Some(m) = &module { + if !m + .item_list() + .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset)) + { + mark::hit!(test_resolve_parent_module_on_module_decl); + module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast); + } + } + + let module = match module { + Some(module) => sema.to_def(&module), + None => sema.to_module_def(position.file_id), + }; + let module = match module { + None => return Vec::new(), + Some(it) => it, + }; + let nav = NavigationTarget::from_module_to_decl(db, module); + vec![nav] +} + +/// Returns `Vec` for the same reason as `parent_module` +pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec { + let sema = Semantics::new(db); + let module = match sema.to_module_def(file_id) { + Some(it) => it, + None => return Vec::new(), + }; + let krate = module.krate(); + vec![krate.into()] +} + +#[cfg(test)] +mod tests { + use base_db::Env; + use cfg::CfgOptions; + use test_utils::mark; + + use crate::{ + mock_analysis::{analysis_and_position, MockAnalysis}, + AnalysisChange, CrateGraph, + Edition::Edition2018, + }; + + #[test] + fn test_resolve_parent_module() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo; + //- /foo.rs + <|>// empty + ", + ); + let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("foo MODULE FileId(1) 0..8"); + } + + #[test] + fn test_resolve_parent_module_on_module_decl() { + mark::check!(test_resolve_parent_module_on_module_decl); + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo; + + //- /foo.rs + mod <|>bar; + + //- /foo/bar.rs + // empty + ", + ); + let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("foo MODULE FileId(1) 0..8"); + } + + #[test] + fn test_resolve_parent_module_for_inline() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo { + mod bar { + mod baz { <|> } + } + } + ", + ); + let nav = 
analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("baz MODULE FileId(1) 32..44"); + } + + #[test] + fn test_resolve_crate_root() { + let mock = MockAnalysis::with_files( + r#" +//- /bar.rs +mod foo; +//- /foo.rs +// empty +"#, + ); + let root_file = mock.id_of("/bar.rs"); + let mod_file = mock.id_of("/foo.rs"); + let mut host = mock.analysis_host(); + assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); + + let mut crate_graph = CrateGraph::default(); + let crate_id = crate_graph.add_crate_root( + root_file, + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let mut change = AnalysisChange::new(); + change.set_crate_graph(crate_graph); + host.apply_change(change); + + assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); + } +} diff --git a/crates/ide/src/prime_caches.rs b/crates/ide/src/prime_caches.rs new file mode 100644 index 000000000..c5ab5a1d8 --- /dev/null +++ b/crates/ide/src/prime_caches.rs @@ -0,0 +1,12 @@ +//! rust-analyzer is lazy and doesn't not compute anything unless asked. This +//! sometimes is counter productive when, for example, the first goto definition +//! request takes longer to compute. This modules implemented prepopulating of +//! various caches, it's not really advanced at the moment. + +use crate::{FileId, RootDatabase}; + +pub(crate) fn prime_caches(db: &RootDatabase, files: Vec) { + for file in files { + let _ = crate::syntax_highlighting::highlight(db, file, None, false); + } +} diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs new file mode 100644 index 000000000..0a76ec6b4 --- /dev/null +++ b/crates/ide/src/references.rs @@ -0,0 +1,694 @@ +//! This module implements a reference search. +//! First, the element at the cursor position must be either an `ast::Name` +//! or `ast::NameRef`. If it's a `ast::NameRef`, at the classification step we +//! 
try to resolve the direct tree parent of this element, otherwise we +//! already have a definition and just need to get its HIR together with +//! some information that is needed for futher steps of searching. +//! After that, we collect files that might contain references and look +//! for text occurrences of the identifier. If there's an `ast::NameRef` +//! at the index that the match starts at and its tree parent is +//! resolved to the search element definition, we get a reference. + +mod rename; + +use hir::Semantics; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition}, + search::SearchScope, + RootDatabase, +}; +use syntax::{ + algo::find_node_at_offset, + ast::{self, NameOwner}, + AstNode, SyntaxKind, SyntaxNode, TextRange, TokenAtOffset, +}; + +use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; + +pub(crate) use self::rename::rename; + +pub use ide_db::search::{Reference, ReferenceAccess, ReferenceKind}; + +#[derive(Debug, Clone)] +pub struct ReferenceSearchResult { + declaration: Declaration, + references: Vec, +} + +#[derive(Debug, Clone)] +pub struct Declaration { + pub nav: NavigationTarget, + pub kind: ReferenceKind, + pub access: Option, +} + +impl ReferenceSearchResult { + pub fn declaration(&self) -> &Declaration { + &self.declaration + } + + pub fn decl_target(&self) -> &NavigationTarget { + &self.declaration.nav + } + + pub fn references(&self) -> &[Reference] { + &self.references + } + + /// Total number of references + /// At least 1 since all valid references should + /// Have a declaration + pub fn len(&self) -> usize { + self.references.len() + 1 + } +} + +// allow turning ReferenceSearchResult into an iterator +// over References +impl IntoIterator for ReferenceSearchResult { + type Item = Reference; + type IntoIter = std::vec::IntoIter; + + fn into_iter(mut self) -> Self::IntoIter { + let mut v = Vec::with_capacity(self.len()); + v.push(Reference { + file_range: FileRange { + file_id: 
self.declaration.nav.file_id, + range: self.declaration.nav.focus_or_full_range(), + }, + kind: self.declaration.kind, + access: self.declaration.access, + }); + v.append(&mut self.references); + v.into_iter() + } +} + +pub(crate) fn find_all_refs( + sema: &Semantics, + position: FilePosition, + search_scope: Option, +) -> Option> { + let _p = profile::span("find_all_refs"); + let syntax = sema.parse(position.file_id).syntax().clone(); + + let (opt_name, search_kind) = if let Some(name) = + get_struct_def_name_for_struct_literal_search(&sema, &syntax, position) + { + (Some(name), ReferenceKind::StructLiteral) + } else { + ( + sema.find_node_at_offset_with_descend::(&syntax, position.offset), + ReferenceKind::Other, + ) + }; + + let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?; + + let references = def + .find_usages(sema, search_scope) + .into_iter() + .filter(|r| search_kind == ReferenceKind::Other || search_kind == r.kind) + .collect(); + + let decl_range = def.try_to_nav(sema.db)?.focus_or_full_range(); + + let declaration = Declaration { + nav: def.try_to_nav(sema.db)?, + kind: ReferenceKind::Other, + access: decl_access(&def, &syntax, decl_range), + }; + + Some(RangeInfo::new(range, ReferenceSearchResult { declaration, references })) +} + +fn find_name( + sema: &Semantics, + syntax: &SyntaxNode, + position: FilePosition, + opt_name: Option, +) -> Option> { + if let Some(name) = opt_name { + let def = classify_name(sema, &name)?.definition(sema.db); + let range = name.syntax().text_range(); + return Some(RangeInfo::new(range, def)); + } + let name_ref = + sema.find_node_at_offset_with_descend::(&syntax, position.offset)?; + let def = classify_name_ref(sema, &name_ref)?.definition(sema.db); + let range = name_ref.syntax().text_range(); + Some(RangeInfo::new(range, def)) +} + +fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Option { + match def { + Definition::Local(_) | Definition::Field(_) => {} + _ 
=> return None, + }; + + let stmt = find_node_at_offset::(syntax, range.start())?; + if stmt.initializer().is_some() { + let pat = stmt.pat()?; + if let ast::Pat::IdentPat(it) = pat { + if it.mut_token().is_some() { + return Some(ReferenceAccess::Write); + } + } + } + + None +} + +fn get_struct_def_name_for_struct_literal_search( + sema: &Semantics, + syntax: &SyntaxNode, + position: FilePosition, +) -> Option { + if let TokenAtOffset::Between(ref left, ref right) = syntax.token_at_offset(position.offset) { + if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN { + return None; + } + if let Some(name) = + sema.find_node_at_offset_with_descend::(&syntax, left.text_range().start()) + { + return name.syntax().ancestors().find_map(ast::Struct::cast).and_then(|l| l.name()); + } + if sema + .find_node_at_offset_with_descend::( + &syntax, + left.text_range().start(), + ) + .is_some() + { + return left.ancestors().find_map(ast::Struct::cast).and_then(|l| l.name()); + } + } + None +} + +#[cfg(test)] +mod tests { + use crate::{ + mock_analysis::{analysis_and_position, MockAnalysis}, + Declaration, Reference, ReferenceSearchResult, SearchScope, + }; + + #[test] + fn test_struct_literal_after_space() { + let refs = get_all_refs( + r#" +struct Foo <|>{ + a: i32, +} +impl Foo { + fn f() -> i32 { 42 } +} +fn main() { + let f: Foo; + f = Foo {a: Foo::f()}; +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..26 7..10 Other", + &["FileId(1) 101..104 StructLiteral"], + ); + } + + #[test] + fn test_struct_literal_before_space() { + let refs = get_all_refs( + r#" +struct Foo<|> {} + fn main() { + let f: Foo; + f = Foo {}; +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..13 7..10 Other", + &["FileId(1) 41..44 Other", "FileId(1) 54..57 StructLiteral"], + ); + } + + #[test] + fn test_struct_literal_with_generic_type() { + let refs = get_all_refs( + r#" +struct Foo <|>{} + fn main() { + let f: Foo::; + f = Foo {}; +} +"#, + ); + 
check_result( + refs, + "Foo STRUCT FileId(1) 0..16 7..10 Other", + &["FileId(1) 64..67 StructLiteral"], + ); + } + + #[test] + fn test_struct_literal_for_tuple() { + let refs = get_all_refs( + r#" +struct Foo<|>(i32); + +fn main() { + let f: Foo; + f = Foo(1); +} +"#, + ); + check_result( + refs, + "Foo STRUCT FileId(1) 0..16 7..10 Other", + &["FileId(1) 54..57 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_for_local() { + let refs = get_all_refs( + r#" +fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { + i = 0; + } + + i = 5; +}"#, + ); + check_result( + refs, + "i IDENT_PAT FileId(1) 24..25 Other Write", + &[ + "FileId(1) 50..51 Other Write", + "FileId(1) 54..55 Other Read", + "FileId(1) 76..77 Other Write", + "FileId(1) 94..95 Other Write", + ], + ); + } + + #[test] + fn search_filters_by_range() { + let refs = get_all_refs( + r#" +fn foo() { + let spam<|> = 92; + spam + spam +} +fn bar() { + let spam = 92; + spam + spam +} +"#, + ); + check_result( + refs, + "spam IDENT_PAT FileId(1) 19..23 Other", + &["FileId(1) 34..38 Other Read", "FileId(1) 41..45 Other Read"], + ); + } + + #[test] + fn test_find_all_refs_for_param_inside() { + let refs = get_all_refs( + r#" +fn foo(i : u32) -> u32 { + i<|> +} +"#, + ); + check_result(refs, "i IDENT_PAT FileId(1) 7..8 Other", &["FileId(1) 29..30 Other Read"]); + } + + #[test] + fn test_find_all_refs_for_fn_param() { + let refs = get_all_refs( + r#" +fn foo(i<|> : u32) -> u32 { + i +} +"#, + ); + check_result(refs, "i IDENT_PAT FileId(1) 7..8 Other", &["FileId(1) 29..30 Other Read"]); + } + + #[test] + fn test_find_all_refs_field_name() { + let refs = get_all_refs( + r#" +//- /lib.rs +struct Foo { + pub spam<|>: u32, +} + +fn main(s: Foo) { + let f = s.spam; +} +"#, + ); + check_result( + refs, + "spam RECORD_FIELD FileId(1) 17..30 21..25 Other", + &["FileId(1) 67..71 Other Read"], + ); + } + + #[test] + fn test_find_all_refs_impl_item_name() { + let refs = get_all_refs( + r#" +struct Foo; +impl 
Foo { + fn f<|>(&self) { } +} +"#, + ); + check_result(refs, "f FN FileId(1) 27..43 30..31 Other", &[]); + } + + #[test] + fn test_find_all_refs_enum_var_name() { + let refs = get_all_refs( + r#" +enum Foo { + A, + B<|>, + C, +} +"#, + ); + check_result(refs, "B VARIANT FileId(1) 22..23 22..23 Other", &[]); + } + + #[test] + fn test_find_all_refs_two_modules() { + let (analysis, pos) = analysis_and_position( + r#" +//- /lib.rs +pub mod foo; +pub mod bar; + +fn f() { + let i = foo::Foo { n: 5 }; +} + +//- /foo.rs +use crate::bar; + +pub struct Foo { + pub n: u32, +} + +fn f() { + let i = bar::Bar { n: 5 }; +} + +//- /bar.rs +use crate::foo; + +pub struct Bar { + pub n: u32, +} + +fn f() { + let i = foo::Foo<|> { n: 5 }; +} +"#, + ); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "Foo STRUCT FileId(2) 17..51 28..31 Other", + &["FileId(1) 53..56 StructLiteral", "FileId(3) 79..82 StructLiteral"], + ); + } + + // `mod foo;` is not in the results because `foo` is an `ast::Name`. + // So, there are two references: the first one is a definition of the `foo` module, + // which is the whole `foo.rs`, and the second one is in `use foo::Foo`. 
+ #[test] + fn test_find_all_refs_decl_module() { + let (analysis, pos) = analysis_and_position( + r#" +//- /lib.rs +mod foo<|>; + +use foo::Foo; + +fn f() { + let i = Foo { n: 5 }; +} + +//- /foo.rs +pub struct Foo { + pub n: u32, +} +"#, + ); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result(refs, "foo SOURCE_FILE FileId(2) 0..35 Other", &["FileId(1) 14..17 Other"]); + } + + #[test] + fn test_find_all_refs_super_mod_vis() { + let (analysis, pos) = analysis_and_position( + r#" +//- /lib.rs +mod foo; + +//- /foo.rs +mod some; +use some::Foo; + +fn f() { + let i = Foo { n: 5 }; +} + +//- /foo/some.rs +pub(super) struct Foo<|> { + pub n: u32, +} +"#, + ); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "Foo STRUCT FileId(3) 0..41 18..21 Other", + &["FileId(2) 20..23 Other", "FileId(2) 47..50 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_with_scope() { + let code = r#" + //- /lib.rs + mod foo; + mod bar; + + pub fn quux<|>() {} + + //- /foo.rs + fn f() { super::quux(); } + + //- /bar.rs + fn f() { super::quux(); } + "#; + + let (mock, pos) = MockAnalysis::with_files_and_position(code); + let bar = mock.id_of("/bar.rs"); + let analysis = mock.analysis(); + + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "quux FN FileId(1) 19..35 26..30 Other", + &["FileId(2) 16..20 StructLiteral", "FileId(3) 16..20 StructLiteral"], + ); + + let refs = + analysis.find_all_refs(pos, Some(SearchScope::single_file(bar))).unwrap().unwrap(); + check_result( + refs, + "quux FN FileId(1) 19..35 26..30 Other", + &["FileId(3) 16..20 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_macro_def() { + let refs = get_all_refs( + r#" +#[macro_export] +macro_rules! 
m1<|> { () => (()) } + +fn foo() { + m1(); + m1(); +} +"#, + ); + check_result( + refs, + "m1 MACRO_CALL FileId(1) 0..46 29..31 Other", + &["FileId(1) 63..65 StructLiteral", "FileId(1) 73..75 StructLiteral"], + ); + } + + #[test] + fn test_basic_highlight_read_write() { + let refs = get_all_refs( + r#" +fn foo() { + let mut i<|> = 0; + i = i + 1; +} +"#, + ); + check_result( + refs, + "i IDENT_PAT FileId(1) 23..24 Other Write", + &["FileId(1) 34..35 Other Write", "FileId(1) 38..39 Other Read"], + ); + } + + #[test] + fn test_basic_highlight_field_read_write() { + let refs = get_all_refs( + r#" +struct S { + f: u32, +} + +fn foo() { + let mut s = S{f: 0}; + s.f<|> = 0; +} +"#, + ); + check_result( + refs, + "f RECORD_FIELD FileId(1) 15..21 15..16 Other", + &["FileId(1) 55..56 Other Read", "FileId(1) 68..69 Other Write"], + ); + } + + #[test] + fn test_basic_highlight_decl_no_write() { + let refs = get_all_refs( + r#" +fn foo() { + let i<|>; + i = 1; +} +"#, + ); + check_result(refs, "i IDENT_PAT FileId(1) 19..20 Other", &["FileId(1) 26..27 Other Write"]); + } + + #[test] + fn test_find_struct_function_refs_outside_module() { + let refs = get_all_refs( + r#" +mod foo { + pub struct Foo; + + impl Foo { + pub fn new<|>() -> Foo { + Foo + } + } +} + +fn main() { + let _f = foo::Foo::new(); +} +"#, + ); + check_result( + refs, + "new FN FileId(1) 54..101 61..64 Other", + &["FileId(1) 146..149 StructLiteral"], + ); + } + + #[test] + fn test_find_all_refs_nested_module() { + let code = r#" + //- /lib.rs + mod foo { + mod bar; + } + + fn f<|>() {} + + //- /foo/bar.rs + use crate::f; + + fn g() { + f(); + } + "#; + + let (analysis, pos) = analysis_and_position(code); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + check_result( + refs, + "f FN FileId(1) 26..35 29..30 Other", + &["FileId(2) 11..12 Other", "FileId(2) 28..29 StructLiteral"], + ); + } + + fn get_all_refs(ra_fixture: &str) -> ReferenceSearchResult { + let (analysis, position) = 
analysis_and_position(ra_fixture); + analysis.find_all_refs(position, None).unwrap().unwrap() + } + + fn check_result(res: ReferenceSearchResult, expected_decl: &str, expected_refs: &[&str]) { + res.declaration().assert_match(expected_decl); + assert_eq!(res.references.len(), expected_refs.len()); + res.references() + .iter() + .enumerate() + .for_each(|(i, r)| ref_assert_match(r, expected_refs[i])); + } + + impl Declaration { + fn debug_render(&self) -> String { + let mut s = format!("{} {:?}", self.nav.debug_render(), self.kind); + if let Some(access) = self.access { + s.push_str(&format!(" {:?}", access)); + } + s + } + + fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + } + + fn ref_debug_render(r: &Reference) -> String { + let mut s = format!("{:?} {:?} {:?}", r.file_range.file_id, r.file_range.range, r.kind); + if let Some(access) = r.access { + s.push_str(&format!(" {:?}", access)); + } + s + } + + fn ref_assert_match(r: &Reference, expected: &str) { + let actual = ref_debug_render(r); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } +} diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs new file mode 100644 index 000000000..d73dc9cd0 --- /dev/null +++ b/crates/ide/src/references/rename.rs @@ -0,0 +1,1010 @@ +//! 
FIXME: write short doc here + +use base_db::SourceDatabaseExt; +use hir::{Module, ModuleDef, ModuleSource, Semantics}; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, + RootDatabase, +}; +use std::convert::TryInto; +use syntax::{ + algo::find_node_at_offset, + ast::{self, NameOwner}, + lex_single_valid_syntax_kind, match_ast, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, +}; +use test_utils::mark; +use text_edit::TextEdit; + +use crate::{ + references::find_all_refs, FilePosition, FileSystemEdit, RangeInfo, Reference, ReferenceKind, + SourceChange, SourceFileEdit, TextRange, TextSize, +}; + +pub(crate) fn rename( + db: &RootDatabase, + position: FilePosition, + new_name: &str, +) -> Option> { + let sema = Semantics::new(db); + + match lex_single_valid_syntax_kind(new_name)? { + SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (), + SyntaxKind::SELF_KW => return rename_to_self(&sema, position), + _ => return None, + } + + let source_file = sema.parse(position.file_id); + let syntax = source_file.syntax(); + if let Some(module) = find_module_at_offset(&sema, position, syntax) { + rename_mod(&sema, position, module, new_name) + } else if let Some(self_token) = + syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW) + { + rename_self_to_param(&sema, position, self_token, new_name) + } else { + rename_reference(&sema, position, new_name) + } +} + +fn find_module_at_offset( + sema: &Semantics, + position: FilePosition, + syntax: &SyntaxNode, +) -> Option { + let ident = syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::IDENT)?; + + let module = match_ast! { + match (ident.parent()) { + ast::NameRef(name_ref) => { + match classify_name_ref(sema, &name_ref)? { + NameRefClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, + _ => return None, + } + }, + ast::Name(name) => { + match classify_name(&sema, &name)? 
{ + NameClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, + _ => return None, + } + }, + _ => return None, + } + }; + + Some(module) +} + +fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFileEdit { + let mut replacement_text = String::new(); + let file_id = reference.file_range.file_id; + let range = match reference.kind { + ReferenceKind::FieldShorthandForField => { + mark::hit!(test_rename_struct_field_for_shorthand); + replacement_text.push_str(new_name); + replacement_text.push_str(": "); + TextRange::new(reference.file_range.range.start(), reference.file_range.range.start()) + } + ReferenceKind::FieldShorthandForLocal => { + mark::hit!(test_rename_local_for_field_shorthand); + replacement_text.push_str(": "); + replacement_text.push_str(new_name); + TextRange::new(reference.file_range.range.end(), reference.file_range.range.end()) + } + _ => { + replacement_text.push_str(new_name); + reference.file_range.range + } + }; + SourceFileEdit { file_id, edit: TextEdit::replace(range, replacement_text) } +} + +fn rename_mod( + sema: &Semantics, + position: FilePosition, + module: Module, + new_name: &str, +) -> Option> { + let mut source_file_edits = Vec::new(); + let mut file_system_edits = Vec::new(); + + let src = module.definition_source(sema.db); + let file_id = src.file_id.original_file(sema.db); + match src.value { + ModuleSource::SourceFile(..) => { + // mod is defined in path/to/dir/mod.rs + let dst = if module.is_mod_rs(sema.db) { + format!("../{}/mod.rs", new_name) + } else { + format!("{}.rs", new_name) + }; + let move_file = FileSystemEdit::MoveFile { src: file_id, anchor: file_id, dst }; + file_system_edits.push(move_file); + } + ModuleSource::Module(..) 
=> {} + } + + if let Some(src) = module.declaration_source(sema.db) { + let file_id = src.file_id.original_file(sema.db); + let name = src.value.name()?; + let edit = SourceFileEdit { + file_id, + edit: TextEdit::replace(name.syntax().text_range(), new_name.into()), + }; + source_file_edits.push(edit); + } + + let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; + let ref_edits = refs + .references + .into_iter() + .map(|reference| source_edit_from_reference(reference, new_name)); + source_file_edits.extend(ref_edits); + + Some(RangeInfo::new(range, SourceChange::from_edits(source_file_edits, file_system_edits))) +} + +fn rename_to_self( + sema: &Semantics, + position: FilePosition, +) -> Option> { + let source_file = sema.parse(position.file_id); + let syn = source_file.syntax(); + + let fn_def = find_node_at_offset::(syn, position.offset)?; + let params = fn_def.param_list()?; + if params.self_param().is_some() { + return None; // method already has self param + } + let first_param = params.params().next()?; + let mutable = match first_param.ty() { + Some(ast::Type::RefType(rt)) => rt.mut_token().is_some(), + _ => return None, // not renaming other types + }; + + let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; + + let param_range = first_param.syntax().text_range(); + let (param_ref, usages): (Vec, Vec) = refs + .into_iter() + .partition(|reference| param_range.intersect(reference.file_range.range).is_some()); + + if param_ref.is_empty() { + return None; + } + + let mut edits = usages + .into_iter() + .map(|reference| source_edit_from_reference(reference, "self")) + .collect::>(); + + edits.push(SourceFileEdit { + file_id: position.file_id, + edit: TextEdit::replace( + param_range, + String::from(if mutable { "&mut self" } else { "&self" }), + ), + }); + + Some(RangeInfo::new(range, SourceChange::from(edits))) +} + +fn text_edit_from_self_param( + syn: &SyntaxNode, + self_param: &ast::SelfParam, + new_name: &str, 
+) -> Option { + fn target_type_name(impl_def: &ast::Impl) -> Option { + if let Some(ast::Type::PathType(p)) = impl_def.self_ty() { + return Some(p.path()?.segment()?.name_ref()?.text().to_string()); + } + None + } + + let impl_def = find_node_at_offset::(syn, self_param.syntax().text_range().start())?; + let type_name = target_type_name(&impl_def)?; + + let mut replacement_text = String::from(new_name); + replacement_text.push_str(": "); + replacement_text.push_str(self_param.mut_token().map_or("&", |_| "&mut ")); + replacement_text.push_str(type_name.as_str()); + + Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) +} + +fn rename_self_to_param( + sema: &Semantics, + position: FilePosition, + self_token: SyntaxToken, + new_name: &str, +) -> Option> { + let source_file = sema.parse(position.file_id); + let syn = source_file.syntax(); + + let text = sema.db.file_text(position.file_id); + let fn_def = find_node_at_offset::(syn, position.offset)?; + let search_range = fn_def.syntax().text_range(); + + let mut edits: Vec = vec![]; + + for (idx, _) in text.match_indices("self") { + let offset: TextSize = idx.try_into().unwrap(); + if !search_range.contains_inclusive(offset) { + continue; + } + if let Some(ref usage) = + syn.token_at_offset(offset).find(|t| t.kind() == SyntaxKind::SELF_KW) + { + let edit = if let Some(ref self_param) = ast::SelfParam::cast(usage.parent()) { + text_edit_from_self_param(syn, self_param, new_name)? 
+ } else { + TextEdit::replace(usage.text_range(), String::from(new_name)) + }; + edits.push(SourceFileEdit { file_id: position.file_id, edit }); + } + } + + let range = ast::SelfParam::cast(self_token.parent()) + .map_or(self_token.text_range(), |p| p.syntax().text_range()); + + Some(RangeInfo::new(range, SourceChange::from(edits))) +} + +fn rename_reference( + sema: &Semantics, + position: FilePosition, + new_name: &str, +) -> Option> { + let RangeInfo { range, info: refs } = find_all_refs(sema, position, None)?; + + let edit = refs + .into_iter() + .map(|reference| source_edit_from_reference(reference, new_name)) + .collect::>(); + + if edit.is_empty() { + return None; + } + + Some(RangeInfo::new(range, SourceChange::from(edit))) +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + use stdx::trim_indent; + use test_utils::{assert_eq_text, mark}; + use text_edit::TextEdit; + + use crate::{mock_analysis::analysis_and_position, FileId}; + + fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = &trim_indent(ra_fixture_after); + let (analysis, position) = analysis_and_position(ra_fixture_before); + let source_change = analysis.rename(position, new_name).unwrap(); + let mut text_edit_builder = TextEdit::builder(); + let mut file_id: Option = None; + if let Some(change) = source_change { + for edit in change.info.source_file_edits { + file_id = Some(edit.file_id); + for indel in edit.edit.into_iter() { + text_edit_builder.replace(indel.delete, indel.insert); + } + } + } + let mut result = analysis.file_text(file_id.unwrap()).unwrap().to_string(); + text_edit_builder.finish().apply(&mut result); + assert_eq_text!(ra_fixture_after, &*result); + } + + fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { + let (analysis, position) = analysis_and_position(ra_fixture); + let source_change = analysis.rename(position, new_name).unwrap().unwrap(); + expect.assert_debug_eq(&source_change) + } + + #[test] 
+ fn test_rename_to_underscore() { + check("_", r#"fn main() { let i<|> = 1; }"#, r#"fn main() { let _ = 1; }"#); + } + + #[test] + fn test_rename_to_raw_identifier() { + check("r#fn", r#"fn main() { let i<|> = 1; }"#, r#"fn main() { let r#fn = 1; }"#); + } + + #[test] + fn test_rename_to_invalid_identifier() { + let (analysis, position) = analysis_and_position(r#"fn main() { let i<|> = 1; }"#); + let new_name = "invalid!"; + let source_change = analysis.rename(position, new_name).unwrap(); + assert!(source_change.is_none()); + } + + #[test] + fn test_rename_for_local() { + check( + "k", + r#" +fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { i = 0; } + + i = 5; +} +"#, + r#" +fn main() { + let mut k = 1; + let j = 1; + k = k + j; + + { k = 0; } + + k = 5; +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_args() { + check( + "b", + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let a<|> = "test"; + foo!(a); +} +"#, + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let b = "test"; + foo!(b); +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_args_rev() { + check( + "b", + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let a = "test"; + foo!(a<|>); +} +"#, + r#" +macro_rules! foo {($i:ident) => {$i} } +fn main() { + let b = "test"; + foo!(b); +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_define_fn() { + check( + "bar", + r#" +macro_rules! define_fn {($id:ident) => { fn $id{} }} +define_fn!(foo); +fn main() { + fo<|>o(); +} +"#, + r#" +macro_rules! define_fn {($id:ident) => { fn $id{} }} +define_fn!(bar); +fn main() { + bar(); +} +"#, + ); + } + + #[test] + fn test_rename_for_macro_define_fn_rev() { + check( + "bar", + r#" +macro_rules! define_fn {($id:ident) => { fn $id{} }} +define_fn!(fo<|>o); +fn main() { + foo(); +} +"#, + r#" +macro_rules! 
define_fn {($id:ident) => { fn $id{} }} +define_fn!(bar); +fn main() { + bar(); +} +"#, + ); + } + + #[test] + fn test_rename_for_param_inside() { + check("j", r#"fn foo(i : u32) -> u32 { i<|> }"#, r#"fn foo(j : u32) -> u32 { j }"#); + } + + #[test] + fn test_rename_refs_for_fn_param() { + check("j", r#"fn foo(i<|> : u32) -> u32 { i }"#, r#"fn foo(j : u32) -> u32 { j }"#); + } + + #[test] + fn test_rename_for_mut_param() { + check("j", r#"fn foo(mut i<|> : u32) -> u32 { i }"#, r#"fn foo(mut j : u32) -> u32 { j }"#); + } + + #[test] + fn test_rename_struct_field() { + check( + "j", + r#" +struct Foo { i<|>: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { i: i } + } +} +"#, + r#" +struct Foo { j: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { j: i } + } +} +"#, + ); + } + + #[test] + fn test_rename_struct_field_for_shorthand() { + mark::check!(test_rename_struct_field_for_shorthand); + check( + "j", + r#" +struct Foo { i<|>: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { i } + } +} +"#, + r#" +struct Foo { j: i32 } + +impl Foo { + fn new(i: i32) -> Self { + Self { j: i } + } +} +"#, + ); + } + + #[test] + fn test_rename_local_for_field_shorthand() { + mark::check!(test_rename_local_for_field_shorthand); + check( + "j", + r#" +struct Foo { i: i32 } + +impl Foo { + fn new(i<|>: i32) -> Self { + Self { i } + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn new(j: i32) -> Self { + Self { i: j } + } +} +"#, + ); + } + + #[test] + fn test_field_shorthand_correct_struct() { + check( + "j", + r#" +struct Foo { i<|>: i32 } +struct Bar { i: i32 } + +impl Bar { + fn new(i: i32) -> Self { + Self { i } + } +} +"#, + r#" +struct Foo { j: i32 } +struct Bar { i: i32 } + +impl Bar { + fn new(i: i32) -> Self { + Self { i } + } +} +"#, + ); + } + + #[test] + fn test_shadow_local_for_struct_shorthand() { + check( + "j", + r#" +struct Foo { i: i32 } + +fn baz(i<|>: i32) -> Self { + let x = Foo { i }; + { + let i = 0; + Foo { i } + } +} +"#, + r#" 
+struct Foo { i: i32 } + +fn baz(j: i32) -> Self { + let x = Foo { i: j }; + { + let i = 0; + Foo { i } + } +} +"#, + ); + } + + #[test] + fn test_rename_mod() { + check_expect( + "foo2", + r#" +//- /lib.rs +mod bar; + +//- /bar.rs +mod foo<|>; + +//- /bar/foo.rs +// empty +"#, + expect![[r#" + RangeInfo { + range: 4..7, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 2, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 4..7, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 3, + ), + anchor: FileId( + 3, + ), + dst: "foo2.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_rename_mod_in_use_tree() { + check_expect( + "quux", + r#" +//- /main.rs +pub mod foo; +pub mod bar; +fn main() {} + +//- /foo.rs +pub struct FooContent; + +//- /bar.rs +use crate::foo<|>::FooContent; +"#, + expect![[r#" + RangeInfo { + range: 11..14, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "quux", + delete: 8..11, + }, + ], + }, + }, + SourceFileEdit { + file_id: FileId( + 3, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "quux", + delete: 11..14, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + anchor: FileId( + 2, + ), + dst: "quux.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_rename_mod_in_dir() { + check_expect( + "foo2", + r#" +//- /lib.rs +mod fo<|>o; +//- /foo/mod.rs +// emtpy +"#, + expect![[r#" + RangeInfo { + range: 4..7, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 4..7, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + anchor: FileId( + 2, + ), + dst: "../foo2/mod.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + 
} + + #[test] + fn test_rename_unusually_nested_mod() { + check_expect( + "bar", + r#" +//- /lib.rs +mod outer { mod fo<|>o; } + +//- /outer/foo.rs +// emtpy +"#, + expect![[r#" + RangeInfo { + range: 16..19, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "bar", + delete: 16..19, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + anchor: FileId( + 2, + ), + dst: "bar.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_module_rename_in_path() { + check( + "baz", + r#" +mod <|>foo { pub fn bar() {} } + +fn main() { foo::bar(); } +"#, + r#" +mod baz { pub fn bar() {} } + +fn main() { baz::bar(); } +"#, + ); + } + + #[test] + fn test_rename_mod_filename_and_path() { + check_expect( + "foo2", + r#" +//- /lib.rs +mod bar; +fn f() { + bar::foo::fun() +} + +//- /bar.rs +pub mod foo<|>; + +//- /bar/foo.rs +// pub fn fun() {} +"#, + expect![[r#" + RangeInfo { + range: 8..11, + info: SourceChange { + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 2, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 8..11, + }, + ], + }, + }, + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + indels: [ + Indel { + insert: "foo2", + delete: 27..30, + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 3, + ), + anchor: FileId( + 3, + ), + dst: "foo2.rs", + }, + ], + is_snippet: false, + }, + } + "#]], + ); + } + + #[test] + fn test_enum_variant_from_module_1() { + check( + "Baz", + r#" +mod foo { + pub enum Foo { Bar<|> } +} + +fn func(f: foo::Foo) { + match f { + foo::Foo::Bar => {} + } +} +"#, + r#" +mod foo { + pub enum Foo { Baz } +} + +fn func(f: foo::Foo) { + match f { + foo::Foo::Baz => {} + } +} +"#, + ); + } + + #[test] + fn test_enum_variant_from_module_2() { + check( + "baz", + r#" +mod foo { + pub struct Foo { pub bar<|>: uint } +} + +fn 
foo(f: foo::Foo) { + let _ = f.bar; +} +"#, + r#" +mod foo { + pub struct Foo { pub baz: uint } +} + +fn foo(f: foo::Foo) { + let _ = f.baz; +} +"#, + ); + } + + #[test] + fn test_parameter_to_self() { + check( + "self", + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(foo<|>: &mut Foo) -> i32 { + foo.i + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(&mut self) -> i32 { + self.i + } +} +"#, + ); + } + + #[test] + fn test_self_to_parameter() { + check( + "foo", + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(&mut <|>self) -> i32 { + self.i + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(foo: &mut Foo) -> i32 { + foo.i + } +} +"#, + ); + } + + #[test] + fn test_self_in_path_to_parameter() { + check( + "foo", + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(&self) -> i32 { + let self_var = 1; + self<|>.i + } +} +"#, + r#" +struct Foo { i: i32 } + +impl Foo { + fn f(foo: &Foo) -> i32 { + let self_var = 1; + foo.i + } +} +"#, + ); + } +} diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs new file mode 100644 index 000000000..c3e07c8de --- /dev/null +++ b/crates/ide/src/runnables.rs @@ -0,0 +1,883 @@ +use std::fmt; + +use cfg::CfgExpr; +use hir::{AsAssocItem, Attrs, HirFileId, InFile, Semantics}; +use ide_db::RootDatabase; +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode, AttrsOwner, DocCommentsOwner, ModuleItemOwner, NameOwner}, + match_ast, SyntaxNode, +}; + +use crate::{display::ToNav, FileId, NavigationTarget}; + +#[derive(Debug, Clone)] +pub struct Runnable { + pub nav: NavigationTarget, + pub kind: RunnableKind, + pub cfg_exprs: Vec, +} + +#[derive(Debug, Clone)] +pub enum TestId { + Name(String), + Path(String), +} + +impl fmt::Display for TestId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + TestId::Name(name) => write!(f, "{}", name), + TestId::Path(path) => write!(f, "{}", path), + } + } +} + +#[derive(Debug, Clone)] +pub enum RunnableKind { + Test { test_id: TestId, 
attr: TestAttr }, + TestMod { path: String }, + Bench { test_id: TestId }, + DocTest { test_id: TestId }, + Bin, +} + +#[derive(Debug, Eq, PartialEq)] +pub struct RunnableAction { + pub run_title: &'static str, + pub debugee: bool, +} + +const TEST: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Test", debugee: true }; +const DOCTEST: RunnableAction = + RunnableAction { run_title: "▶\u{fe0e} Run Doctest", debugee: false }; +const BENCH: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Bench", debugee: true }; +const BIN: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run", debugee: true }; + +impl Runnable { + // test package::module::testname + pub fn label(&self, target: Option) -> String { + match &self.kind { + RunnableKind::Test { test_id, .. } => format!("test {}", test_id), + RunnableKind::TestMod { path } => format!("test-mod {}", path), + RunnableKind::Bench { test_id } => format!("bench {}", test_id), + RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id), + RunnableKind::Bin => { + target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t)) + } + } + } + + pub fn action(&self) -> &'static RunnableAction { + match &self.kind { + RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => &TEST, + RunnableKind::DocTest { .. } => &DOCTEST, + RunnableKind::Bench { .. } => &BENCH, + RunnableKind::Bin => &BIN, + } + } +} + +// Feature: Run +// +// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor +// location**. Super useful for repeatedly running just a single test. Do bind this +// to a shortcut! 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Run** +// |=== +pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { + let sema = Semantics::new(db); + let source_file = sema.parse(file_id); + source_file.syntax().descendants().filter_map(|i| runnable(&sema, i, file_id)).collect() +} + +pub(crate) fn runnable( + sema: &Semantics, + item: SyntaxNode, + file_id: FileId, +) -> Option { + match_ast! { + match item { + ast::Fn(it) => runnable_fn(sema, it, file_id), + ast::Module(it) => runnable_mod(sema, it, file_id), + _ => None, + } + } +} + +fn runnable_fn( + sema: &Semantics, + fn_def: ast::Fn, + file_id: FileId, +) -> Option { + let name_string = fn_def.name()?.text().to_string(); + + let kind = if name_string == "main" { + RunnableKind::Bin + } else { + let test_id = match sema.to_def(&fn_def).map(|def| def.module(sema.db)) { + Some(module) => { + let def = sema.to_def(&fn_def)?; + let impl_trait_name = def.as_assoc_item(sema.db).and_then(|assoc_item| { + match assoc_item.container(sema.db) { + hir::AssocItemContainer::Trait(trait_item) => { + Some(trait_item.name(sema.db).to_string()) + } + hir::AssocItemContainer::ImplDef(impl_def) => impl_def + .target_ty(sema.db) + .as_adt() + .map(|adt| adt.name(sema.db).to_string()), + } + }); + + let path_iter = module + .path_to_root(sema.db) + .into_iter() + .rev() + .filter_map(|it| it.name(sema.db)) + .map(|name| name.to_string()); + + let path = if let Some(impl_trait_name) = impl_trait_name { + path_iter + .chain(std::iter::once(impl_trait_name)) + .chain(std::iter::once(name_string)) + .join("::") + } else { + path_iter.chain(std::iter::once(name_string)).join("::") + }; + + TestId::Path(path) + } + None => TestId::Name(name_string), + }; + + if has_test_related_attribute(&fn_def) { + let attr = TestAttr::from_fn(&fn_def); + RunnableKind::Test { test_id, attr } + } else if fn_def.has_atom_attr("bench") { + RunnableKind::Bench { test_id } + } else if has_doc_test(&fn_def) { 
+ RunnableKind::DocTest { test_id } + } else { + return None; + } + }; + + let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &fn_def)); + let cfg_exprs = attrs.cfg().collect(); + + let nav = if let RunnableKind::DocTest { .. } = kind { + NavigationTarget::from_doc_commented( + sema.db, + InFile::new(file_id.into(), &fn_def), + InFile::new(file_id.into(), &fn_def), + ) + } else { + NavigationTarget::from_named(sema.db, InFile::new(file_id.into(), &fn_def)) + }; + Some(Runnable { nav, kind, cfg_exprs }) +} + +#[derive(Debug, Copy, Clone)] +pub struct TestAttr { + pub ignore: bool, +} + +impl TestAttr { + fn from_fn(fn_def: &ast::Fn) -> TestAttr { + let ignore = fn_def + .attrs() + .filter_map(|attr| attr.simple_name()) + .any(|attribute_text| attribute_text == "ignore"); + TestAttr { ignore } + } +} + +/// This is a method with a heuristics to support test methods annotated with custom test annotations, such as +/// `#[test_case(...)]`, `#[tokio::test]` and similar. +/// Also a regular `#[test]` annotation is supported. +/// +/// It may produce false positives, for example, `#[wasm_bindgen_test]` requires a different command to run the test, +/// but it's better than not to have the runnables for the tests at all. 
+fn has_test_related_attribute(fn_def: &ast::Fn) -> bool { + fn_def + .attrs() + .filter_map(|attr| attr.path()) + .map(|path| path.syntax().to_string().to_lowercase()) + .any(|attribute_text| attribute_text.contains("test")) +} + +fn has_doc_test(fn_def: &ast::Fn) -> bool { + fn_def.doc_comment_text().map_or(false, |comment| comment.contains("```")) +} + +fn runnable_mod( + sema: &Semantics, + module: ast::Module, + file_id: FileId, +) -> Option { + if !has_test_function_or_multiple_test_submodules(&module) { + return None; + } + let module_def = sema.to_def(&module)?; + + let path = module_def + .path_to_root(sema.db) + .into_iter() + .rev() + .filter_map(|it| it.name(sema.db)) + .join("::"); + + let attrs = Attrs::from_attrs_owner(sema.db, InFile::new(HirFileId::from(file_id), &module)); + let cfg_exprs = attrs.cfg().collect(); + let nav = module_def.to_nav(sema.db); + Some(Runnable { nav, kind: RunnableKind::TestMod { path }, cfg_exprs }) +} + +// We could create runnables for modules with number_of_test_submodules > 0, +// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already +fn has_test_function_or_multiple_test_submodules(module: &ast::Module) -> bool { + if let Some(item_list) = module.item_list() { + let mut number_of_test_submodules = 0; + + for item in item_list.items() { + match item { + ast::Item::Fn(f) => { + if has_test_related_attribute(&f) { + return true; + } + } + ast::Item::Module(submodule) => { + if has_test_function_or_multiple_test_submodules(&submodule) { + number_of_test_submodules += 1; + } + } + _ => (), + } + } + + number_of_test_submodules > 1 + } else { + false + } +} + +#[cfg(test)] +mod tests { + use expect::{expect, Expect}; + + use crate::mock_analysis::analysis_and_position; + + use super::{RunnableAction, BENCH, BIN, DOCTEST, TEST}; + + fn check( + ra_fixture: &str, + // FIXME: fold this into `expect` as well + actions: &[&RunnableAction], + expect: Expect, + ) { + let (analysis, 
position) = analysis_and_position(ra_fixture); + let runnables = analysis.runnables(position.file_id).unwrap(); + expect.assert_debug_eq(&runnables); + assert_eq!( + actions, + runnables.into_iter().map(|it| it.action()).collect::>().as_slice() + ); + } + + #[test] + fn test_runnables() { + check( + r#" +//- /lib.rs +<|> +fn main() {} + +#[test] +fn test_foo() {} + +#[test] +#[ignore] +fn test_foo() {} + +#[bench] +fn bench() {} +"#, + &[&BIN, &TEST, &TEST, &BENCH], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..13, + focus_range: Some( + 4..8, + ), + name: "main", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bin, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 15..39, + focus_range: Some( + 26..34, + ), + name: "test_foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 41..75, + focus_range: Some( + 62..70, + ), + name: "test_foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo", + ), + attr: TestAttr { + ignore: true, + }, + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 77..99, + focus_range: Some( + 89..94, + ), + name: "bench", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bench { + test_id: Path( + "bench", + ), + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_doc_test() { + check( + r#" +//- /lib.rs +<|> +fn main() {} + +/// ``` +/// let x = 5; +/// ``` +fn foo() {} +"#, + &[&BIN, &DOCTEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + 
full_range: 1..13, + focus_range: Some( + 4..8, + ), + name: "main", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bin, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 15..57, + focus_range: None, + name: "foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: DocTest { + test_id: Path( + "foo", + ), + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_doc_test_in_impl() { + check( + r#" +//- /lib.rs +<|> +fn main() {} + +struct Data; +impl Data { + /// ``` + /// let x = 5; + /// ``` + fn foo() {} +} +"#, + &[&BIN, &DOCTEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..13, + focus_range: Some( + 4..8, + ), + name: "main", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Bin, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 44..98, + focus_range: None, + name: "foo", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: DocTest { + test_id: Path( + "Data::foo", + ), + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_module() { + check( + r#" +//- /lib.rs +<|> +mod test_mod { + #[test] + fn test_foo1() {} +} +"#, + &[&TEST, &TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..51, + focus_range: Some( + 5..13, + ), + name: "test_mod", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "test_mod", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 20..49, + focus_range: Some( + 35..44, + ), + name: "test_foo1", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_mod::test_foo1", 
+ ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() { + check( + r#" +//- /lib.rs +<|> +mod root_tests { + mod nested_tests_0 { + mod nested_tests_1 { + #[test] + fn nested_test_11() {} + + #[test] + fn nested_test_12() {} + } + + mod nested_tests_2 { + #[test] + fn nested_test_2() {} + } + + mod nested_tests_3 {} + } + + mod nested_tests_4 {} +} +"#, + &[&TEST, &TEST, &TEST, &TEST, &TEST, &TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 22..323, + focus_range: Some( + 26..40, + ), + name: "nested_tests_0", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "root_tests::nested_tests_0", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 51..192, + focus_range: Some( + 55..69, + ), + name: "nested_tests_1", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "root_tests::nested_tests_0::nested_tests_1", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 84..126, + focus_range: Some( + 107..121, + ), + name: "nested_test_11", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "root_tests::nested_tests_0::nested_tests_1::nested_test_11", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 140..182, + focus_range: Some( + 163..177, + ), + name: "nested_test_12", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "root_tests::nested_tests_0::nested_tests_1::nested_test_12", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: 
[], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 202..286, + focus_range: Some( + 206..220, + ), + name: "nested_tests_2", + kind: MODULE, + container_name: None, + description: None, + docs: None, + }, + kind: TestMod { + path: "root_tests::nested_tests_0::nested_tests_2", + }, + cfg_exprs: [], + }, + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 235..276, + focus_range: Some( + 258..271, + ), + name: "nested_test_2", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "root_tests::nested_tests_0::nested_tests_2::nested_test_2", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_with_feature() { + check( + r#" +//- /lib.rs crate:foo cfg:feature=foo +<|> +#[test] +#[cfg(feature = "foo")] +fn test_foo1() {} +"#, + &[&TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..50, + focus_range: Some( + 36..45, + ), + name: "test_foo1", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo1", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [ + KeyValue { + key: "feature", + value: "foo", + }, + ], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_with_features() { + check( + r#" +//- /lib.rs crate:foo cfg:feature=foo,feature=bar +<|> +#[test] +#[cfg(all(feature = "foo", feature = "bar"))] +fn test_foo1() {} +"#, + &[&TEST], + expect![[r#" + [ + Runnable { + nav: NavigationTarget { + file_id: FileId( + 1, + ), + full_range: 1..72, + focus_range: Some( + 58..67, + ), + name: "test_foo1", + kind: FN, + container_name: None, + description: None, + docs: None, + }, + kind: Test { + test_id: Path( + "test_foo1", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg_exprs: [ + All( + [ + KeyValue { + key: "feature", + value: 
"foo", + }, + KeyValue { + key: "feature", + value: "bar", + }, + ], + ), + ], + }, + ] + "#]], + ); + } + + #[test] + fn test_runnables_no_test_function_in_module() { + check( + r#" +//- /lib.rs +<|> +mod test_mod { + fn foo1() {} +} +"#, + &[], + expect![[r#" + [] + "#]], + ); + } +} diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs new file mode 100644 index 000000000..c23708181 --- /dev/null +++ b/crates/ide/src/status.rs @@ -0,0 +1,145 @@ +use std::{fmt, iter::FromIterator, sync::Arc}; + +use base_db::{ + salsa::debug::{DebugQueryTable, TableEntry}, + FileTextQuery, SourceRootId, +}; +use hir::MacroFile; +use ide_db::{ + symbol_index::{LibrarySymbolsQuery, SymbolIndex}, + RootDatabase, +}; +use profile::{memory_usage, Bytes}; +use rustc_hash::FxHashMap; +use syntax::{ast, Parse, SyntaxNode}; + +use crate::FileId; + +fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { + base_db::ParseQuery.in_db(db).entries::() +} +fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { + hir::db::ParseMacroQuery.in_db(db).entries::() +} + +// Feature: Status +// +// Shows internal statistic about memory usage of rust-analyzer. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Status** +// |=== +pub(crate) fn status(db: &RootDatabase) -> String { + let files_stats = FileTextQuery.in_db(db).entries::(); + let syntax_tree_stats = syntax_tree_stats(db); + let macro_syntax_tree_stats = macro_syntax_tree_stats(db); + let symbols_stats = LibrarySymbolsQuery.in_db(db).entries::(); + format!( + "{}\n{}\n{}\n{} (macros)\n\n\nmemory:\n{}\ngc {:?} seconds ago", + files_stats, + symbols_stats, + syntax_tree_stats, + macro_syntax_tree_stats, + memory_usage(), + db.last_gc.elapsed().as_secs(), + ) +} + +#[derive(Default)] +struct FilesStats { + total: usize, + size: Bytes, +} + +impl fmt::Display for FilesStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} ({}) files", self.total, self.size) + } +} + +impl FromIterator>> for FilesStats { + fn from_iter(iter: T) -> FilesStats + where + T: IntoIterator>>, + { + let mut res = FilesStats::default(); + for entry in iter { + res.total += 1; + res.size += entry.value.unwrap().len(); + } + res + } +} + +#[derive(Default)] +pub(crate) struct SyntaxTreeStats { + total: usize, + pub(crate) retained: usize, +} + +impl fmt::Display for SyntaxTreeStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} trees, {} retained", self.total, self.retained) + } +} + +impl FromIterator>> for SyntaxTreeStats { + fn from_iter(iter: T) -> SyntaxTreeStats + where + T: IntoIterator>>, + { + let mut res = SyntaxTreeStats::default(); + for entry in iter { + res.total += 1; + res.retained += entry.value.is_some() as usize; + } + res + } +} + +impl FromIterator, M)>>> for SyntaxTreeStats { + fn from_iter(iter: T) -> SyntaxTreeStats + where + T: IntoIterator, M)>>>, + { + let mut res = SyntaxTreeStats::default(); + for entry in iter { + res.total += 1; + res.retained += entry.value.is_some() as usize; + } + res + } +} + +#[derive(Default)] +struct LibrarySymbolsStats { + total: usize, + size: 
Bytes, +} + +impl fmt::Display for LibrarySymbolsStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} ({}) symbols", self.total, self.size) + } +} + +impl FromIterator>>> + for LibrarySymbolsStats +{ + fn from_iter(iter: T) -> LibrarySymbolsStats + where + T: IntoIterator>>>, + { + let mut res = LibrarySymbolsStats::default(); + for entry in iter { + let value = entry.value.unwrap(); + for symbols in value.values() { + res.total += symbols.len(); + res.size += symbols.memory_size(); + } + } + res + } +} diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs new file mode 100644 index 000000000..5d7c7e8d0 --- /dev/null +++ b/crates/ide/src/syntax_highlighting.rs @@ -0,0 +1,872 @@ +mod tags; +mod html; +mod injection; +#[cfg(test)] +mod tests; + +use hir::{Name, Semantics, VariantDef}; +use ide_db::{ + defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, + RootDatabase, +}; +use rustc_hash::FxHashMap; +use syntax::{ + ast::{self, HasFormatSpecifier}, + AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, + SyntaxKind::*, + TextRange, WalkEvent, T, +}; + +use crate::FileId; + +use ast::FormatSpecifier; +pub(crate) use html::highlight_as_html; +pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag}; + +#[derive(Debug, Clone)] +pub struct HighlightedRange { + pub range: TextRange, + pub highlight: Highlight, + pub binding_hash: Option, +} + +// Feature: Semantic Syntax Highlighting +// +// rust-analyzer highlights the code semantically. +// For example, `bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait. +// rust-analyzer does not specify colors directly, instead it assigns tag (like `struct`) and a set of modifiers (like `declaration`) to each token. +// It's up to the client to map those to specific colors. 
+// +// The general rule is that a reference to an entity gets colored the same way as the entity itself. +// We also give special modifier for `mut` and `&mut` local variables. +pub(crate) fn highlight( + db: &RootDatabase, + file_id: FileId, + range_to_highlight: Option, + syntactic_name_ref_highlighting: bool, +) -> Vec { + let _p = profile::span("highlight"); + let sema = Semantics::new(db); + + // Determine the root based on the given range. + let (root, range_to_highlight) = { + let source_file = sema.parse(file_id); + match range_to_highlight { + Some(range) => { + let node = match source_file.syntax().covering_element(range) { + NodeOrToken::Node(it) => it, + NodeOrToken::Token(it) => it.parent(), + }; + (node, range) + } + None => (source_file.syntax().clone(), source_file.syntax().text_range()), + } + }; + + let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); + // We use a stack for the DFS traversal below. + // When we leave a node, the we use it to flatten the highlighted ranges. + let mut stack = HighlightedRangeStack::new(); + + let mut current_macro_call: Option = None; + let mut format_string: Option = None; + + // Walk all nodes, keeping track of whether we are inside a macro or not. + // If in macro, expand it first and highlight the expanded code. 
+ for event in root.preorder_with_tokens() { + match &event { + WalkEvent::Enter(_) => stack.push(), + WalkEvent::Leave(_) => stack.pop(), + }; + + let event_range = match &event { + WalkEvent::Enter(it) => it.text_range(), + WalkEvent::Leave(it) => it.text_range(), + }; + + // Element outside of the viewport, no need to highlight + if range_to_highlight.intersect(event_range).is_none() { + continue; + } + + // Track "inside macro" state + match event.clone().map(|it| it.into_node().and_then(ast::MacroCall::cast)) { + WalkEvent::Enter(Some(mc)) => { + current_macro_call = Some(mc.clone()); + if let Some(range) = macro_call_range(&mc) { + stack.add(HighlightedRange { + range, + highlight: HighlightTag::Macro.into(), + binding_hash: None, + }); + } + if let Some(name) = mc.is_macro_rules() { + if let Some((highlight, binding_hash)) = highlight_element( + &sema, + &mut bindings_shadow_count, + syntactic_name_ref_highlighting, + name.syntax().clone().into(), + ) { + stack.add(HighlightedRange { + range: name.syntax().text_range(), + highlight, + binding_hash, + }); + } + } + continue; + } + WalkEvent::Leave(Some(mc)) => { + assert!(current_macro_call == Some(mc)); + current_macro_call = None; + format_string = None; + } + _ => (), + } + + // Check for Rust code in documentation + match &event { + WalkEvent::Leave(NodeOrToken::Node(node)) => { + if let Some((doctest, range_mapping, new_comments)) = + injection::extract_doc_comments(node) + { + injection::highlight_doc_comment( + doctest, + range_mapping, + new_comments, + &mut stack, + ); + } + } + _ => (), + } + + let element = match event { + WalkEvent::Enter(it) => it, + WalkEvent::Leave(_) => continue, + }; + + let range = element.text_range(); + + let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT { + // Inside a macro -- expand it first + let token = match element.clone().into_token() { + Some(it) if it.parent().kind() == TOKEN_TREE => it, + _ => continue, + }; + let token = 
sema.descend_into_macros(token.clone()); + let parent = token.parent(); + + // Check if macro takes a format string and remember it for highlighting later. + // The macros that accept a format string expand to a compiler builtin macros + // `format_args` and `format_args_nl`. + if let Some(name) = parent + .parent() + .and_then(ast::MacroCall::cast) + .and_then(|mc| mc.path()) + .and_then(|p| p.segment()) + .and_then(|s| s.name_ref()) + { + match name.text().as_str() { + "format_args" | "format_args_nl" => { + format_string = parent + .children_with_tokens() + .filter(|t| t.kind() != WHITESPACE) + .nth(1) + .filter(|e| { + ast::String::can_cast(e.kind()) + || ast::RawString::can_cast(e.kind()) + }) + } + _ => {} + } + } + + // We only care Name and Name_ref + match (token.kind(), parent.kind()) { + (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(), + _ => token.into(), + } + } else { + element.clone() + }; + + if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) { + let expanded = element_to_highlight.as_token().unwrap().clone(); + if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() { + continue; + } + } + + let is_format_string = format_string.as_ref() == Some(&element_to_highlight); + + if let Some((highlight, binding_hash)) = highlight_element( + &sema, + &mut bindings_shadow_count, + syntactic_name_ref_highlighting, + element_to_highlight.clone(), + ) { + stack.add(HighlightedRange { range, highlight, binding_hash }); + if let Some(string) = + element_to_highlight.as_token().cloned().and_then(ast::String::cast) + { + if is_format_string { + stack.push(); + string.lex_format_specifier(|piece_range, kind| { + if let Some(highlight) = highlight_format_specifier(kind) { + stack.add(HighlightedRange { + range: piece_range + range.start(), + highlight: highlight.into(), + binding_hash: None, + }); + } + }); + stack.pop(); + } + // Highlight escape sequences + if let Some(char_ranges) = string.char_ranges() { + 
stack.push(); + for (piece_range, _) in char_ranges.iter().filter(|(_, char)| char.is_ok()) { + if string.text()[piece_range.start().into()..].starts_with('\\') { + stack.add(HighlightedRange { + range: piece_range + range.start(), + highlight: HighlightTag::EscapeSequence.into(), + binding_hash: None, + }); + } + } + stack.pop_and_inject(None); + } + } else if let Some(string) = + element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) + { + if is_format_string { + stack.push(); + string.lex_format_specifier(|piece_range, kind| { + if let Some(highlight) = highlight_format_specifier(kind) { + stack.add(HighlightedRange { + range: piece_range + range.start(), + highlight: highlight.into(), + binding_hash: None, + }); + } + }); + stack.pop(); + } + } + } + } + + stack.flattened() +} + +#[derive(Debug)] +struct HighlightedRangeStack { + stack: Vec>, +} + +/// We use a stack to implement the flattening logic for the highlighted +/// syntax ranges. +impl HighlightedRangeStack { + fn new() -> Self { + Self { stack: vec![Vec::new()] } + } + + fn push(&mut self) { + self.stack.push(Vec::new()); + } + + /// Flattens the highlighted ranges. 
+ /// + /// For example `#[cfg(feature = "foo")]` contains the nested ranges: + /// 1) parent-range: Attribute [0, 23) + /// 2) child-range: String [16, 21) + /// + /// The following code implements the flattening, for our example this results to: + /// `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]` + fn pop(&mut self) { + let children = self.stack.pop().unwrap(); + let prev = self.stack.last_mut().unwrap(); + let needs_flattening = !children.is_empty() + && !prev.is_empty() + && prev.last().unwrap().range.contains_range(children.first().unwrap().range); + if !needs_flattening { + prev.extend(children); + } else { + let mut parent = prev.pop().unwrap(); + for ele in children { + assert!(parent.range.contains_range(ele.range)); + + let cloned = Self::intersect(&mut parent, &ele); + if !parent.range.is_empty() { + prev.push(parent); + } + prev.push(ele); + parent = cloned; + } + if !parent.range.is_empty() { + prev.push(parent); + } + } + } + + /// Intersects the `HighlightedRange` `parent` with `child`. + /// `parent` is mutated in place, becoming the range before `child`. + /// Returns the range (of the same type as `parent`) *after* `child`. + fn intersect(parent: &mut HighlightedRange, child: &HighlightedRange) -> HighlightedRange { + assert!(parent.range.contains_range(child.range)); + + let mut cloned = parent.clone(); + parent.range = TextRange::new(parent.range.start(), child.range.start()); + cloned.range = TextRange::new(child.range.end(), cloned.range.end()); + + cloned + } + + /// Remove the `HighlightRange` of `parent` that's currently covered by `child`. 
+ fn intersect_partial(parent: &mut HighlightedRange, child: &HighlightedRange) { + assert!( + parent.range.start() <= child.range.start() + && parent.range.end() >= child.range.start() + && child.range.end() > parent.range.end() + ); + + parent.range = TextRange::new(parent.range.start(), child.range.start()); + } + + /// Similar to `pop`, but can modify arbitrary prior ranges (where `pop`) + /// can only modify the last range currently on the stack. + /// Can be used to do injections that span multiple ranges, like the + /// doctest injection below. + /// If `overwrite_parent` is non-optional, the highlighting of the parent range + /// is overwritten with the argument. + /// + /// Note that `pop` can be simulated by `pop_and_inject(false)` but the + /// latter is computationally more expensive. + fn pop_and_inject(&mut self, overwrite_parent: Option) { + let mut children = self.stack.pop().unwrap(); + let prev = self.stack.last_mut().unwrap(); + children.sort_by_key(|range| range.range.start()); + prev.sort_by_key(|range| range.range.start()); + + for child in children { + if let Some(idx) = + prev.iter().position(|parent| parent.range.contains_range(child.range)) + { + if let Some(tag) = overwrite_parent { + prev[idx].highlight = tag; + } + + let cloned = Self::intersect(&mut prev[idx], &child); + let insert_idx = if prev[idx].range.is_empty() { + prev.remove(idx); + idx + } else { + idx + 1 + }; + prev.insert(insert_idx, child); + if !cloned.range.is_empty() { + prev.insert(insert_idx + 1, cloned); + } + } else { + let maybe_idx = + prev.iter().position(|parent| parent.range.contains(child.range.start())); + match (overwrite_parent, maybe_idx) { + (Some(_), Some(idx)) => { + Self::intersect_partial(&mut prev[idx], &child); + let insert_idx = if prev[idx].range.is_empty() { + prev.remove(idx); + idx + } else { + idx + 1 + }; + prev.insert(insert_idx, child); + } + (_, None) => { + let idx = prev + .binary_search_by_key(&child.range.start(), |range| 
range.range.start()) + .unwrap_or_else(|x| x); + prev.insert(idx, child); + } + _ => { + unreachable!("child range should be completely contained in parent range"); + } + } + } + } + } + + fn add(&mut self, range: HighlightedRange) { + self.stack + .last_mut() + .expect("during DFS traversal, the stack must not be empty") + .push(range) + } + + fn flattened(mut self) -> Vec { + assert_eq!( + self.stack.len(), + 1, + "after DFS traversal, the stack should only contain a single element" + ); + let mut res = self.stack.pop().unwrap(); + res.sort_by_key(|range| range.range.start()); + // Check that ranges are sorted and disjoint + assert!(res + .iter() + .zip(res.iter().skip(1)) + .all(|(left, right)| left.range.end() <= right.range.start())); + res + } +} + +fn highlight_format_specifier(kind: FormatSpecifier) -> Option { + Some(match kind { + FormatSpecifier::Open + | FormatSpecifier::Close + | FormatSpecifier::Colon + | FormatSpecifier::Fill + | FormatSpecifier::Align + | FormatSpecifier::Sign + | FormatSpecifier::NumberSign + | FormatSpecifier::DollarSign + | FormatSpecifier::Dot + | FormatSpecifier::Asterisk + | FormatSpecifier::QuestionMark => HighlightTag::FormatSpecifier, + FormatSpecifier::Integer | FormatSpecifier::Zero => HighlightTag::NumericLiteral, + FormatSpecifier::Identifier => HighlightTag::Local, + }) +} + +fn macro_call_range(macro_call: &ast::MacroCall) -> Option { + let path = macro_call.path()?; + let name_ref = path.segment()?.name_ref()?; + + let range_start = name_ref.syntax().text_range().start(); + let mut range_end = name_ref.syntax().text_range().end(); + for sibling in path.syntax().siblings_with_tokens(Direction::Next) { + match sibling.kind() { + T![!] 
| IDENT => range_end = sibling.text_range().end(), + _ => (), + } + } + + Some(TextRange::new(range_start, range_end)) +} + +fn is_possibly_unsafe(name_ref: &ast::NameRef) -> bool { + name_ref + .syntax() + .parent() + .and_then(|parent| { + ast::FieldExpr::cast(parent.clone()) + .map(|_| true) + .or_else(|| ast::RecordPatField::cast(parent).map(|_| true)) + }) + .unwrap_or(false) +} + +fn highlight_element( + sema: &Semantics, + bindings_shadow_count: &mut FxHashMap, + syntactic_name_ref_highlighting: bool, + element: SyntaxElement, +) -> Option<(Highlight, Option)> { + let db = sema.db; + let mut binding_hash = None; + let highlight: Highlight = match element.kind() { + FN => { + bindings_shadow_count.clear(); + return None; + } + + // Highlight definitions depending on the "type" of the definition. + NAME => { + let name = element.into_node().and_then(ast::Name::cast).unwrap(); + let name_kind = classify_name(sema, &name); + + if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { + if let Some(name) = local.name(db) { + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *shadow_count += 1; + binding_hash = Some(calc_binding_hash(&name, *shadow_count)) + } + }; + + match name_kind { + Some(NameClass::ExternCrate(_)) => HighlightTag::Module.into(), + Some(NameClass::Definition(def)) => { + highlight_name(sema, db, def, None, false) | HighlightModifier::Definition + } + Some(NameClass::ConstReference(def)) => highlight_name(sema, db, def, None, false), + Some(NameClass::FieldShorthand { field, .. 
}) => { + let mut h = HighlightTag::Field.into(); + if let Definition::Field(field) = field { + if let VariantDef::Union(_) = field.parent_def(db) { + h |= HighlightModifier::Unsafe; + } + } + + h + } + None => highlight_name_by_syntax(name) | HighlightModifier::Definition, + } + } + + // Highlight references like the definitions they resolve to + NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => { + Highlight::from(HighlightTag::Function) | HighlightModifier::Attribute + } + NAME_REF => { + let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); + let possibly_unsafe = is_possibly_unsafe(&name_ref); + match classify_name_ref(sema, &name_ref) { + Some(name_kind) => match name_kind { + NameRefClass::ExternCrate(_) => HighlightTag::Module.into(), + NameRefClass::Definition(def) => { + if let Definition::Local(local) = &def { + if let Some(name) = local.name(db) { + let shadow_count = + bindings_shadow_count.entry(name.clone()).or_default(); + binding_hash = Some(calc_binding_hash(&name, *shadow_count)) + } + }; + highlight_name(sema, db, def, Some(name_ref), possibly_unsafe) + } + NameRefClass::FieldShorthand { .. 
} => HighlightTag::Field.into(), + }, + None if syntactic_name_ref_highlighting => { + highlight_name_ref_by_syntax(name_ref, sema) + } + None => HighlightTag::UnresolvedReference.into(), + } + } + + // Simple token-based highlighting + COMMENT => { + let comment = element.into_token().and_then(ast::Comment::cast)?; + let h = HighlightTag::Comment; + match comment.kind().doc { + Some(_) => h | HighlightModifier::Documentation, + None => h.into(), + } + } + STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), + ATTR => HighlightTag::Attribute.into(), + INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), + BYTE => HighlightTag::ByteLiteral.into(), + CHAR => HighlightTag::CharLiteral.into(), + QUESTION => Highlight::new(HighlightTag::Operator) | HighlightModifier::ControlFlow, + LIFETIME => { + let h = Highlight::new(HighlightTag::Lifetime); + match element.parent().map(|it| it.kind()) { + Some(LIFETIME_PARAM) | Some(LABEL) => h | HighlightModifier::Definition, + _ => h, + } + } + p if p.is_punct() => match p { + T![&] => { + let h = HighlightTag::Operator.into(); + let is_unsafe = element + .parent() + .and_then(ast::RefExpr::cast) + .map(|ref_expr| sema.is_unsafe_ref_expr(&ref_expr)) + .unwrap_or(false); + if is_unsafe { + h | HighlightModifier::Unsafe + } else { + h + } + } + T![::] | T![->] | T![=>] | T![..] | T![=] | T![@] => HighlightTag::Operator.into(), + T![!] 
if element.parent().and_then(ast::MacroCall::cast).is_some() => { + HighlightTag::Macro.into() + } + T![*] if element.parent().and_then(ast::PtrType::cast).is_some() => { + HighlightTag::Keyword.into() + } + T![*] if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { + let prefix_expr = element.parent().and_then(ast::PrefixExpr::cast)?; + + let expr = prefix_expr.expr()?; + let ty = sema.type_of_expr(&expr)?; + if ty.is_raw_ptr() { + HighlightTag::Operator | HighlightModifier::Unsafe + } else if let Some(ast::PrefixOp::Deref) = prefix_expr.op_kind() { + HighlightTag::Operator.into() + } else { + HighlightTag::Punctuation.into() + } + } + T![-] if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { + HighlightTag::NumericLiteral.into() + } + _ if element.parent().and_then(ast::PrefixExpr::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::BinExpr::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::RangeExpr::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::RangePat::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::RestPat::cast).is_some() => { + HighlightTag::Operator.into() + } + _ if element.parent().and_then(ast::Attr::cast).is_some() => { + HighlightTag::Attribute.into() + } + _ => HighlightTag::Punctuation.into(), + }, + + k if k.is_keyword() => { + let h = Highlight::new(HighlightTag::Keyword); + match k { + T![break] + | T![continue] + | T![else] + | T![if] + | T![loop] + | T![match] + | T![return] + | T![while] + | T![in] => h | HighlightModifier::ControlFlow, + T![for] if !is_child_of_impl(&element) => h | HighlightModifier::ControlFlow, + T![unsafe] => h | HighlightModifier::Unsafe, + T![true] | T![false] => HighlightTag::BoolLiteral.into(), + T![self] => { + let self_param_is_mut = element + .parent() + .and_then(ast::SelfParam::cast) + .and_then(|p| 
p.mut_token()) + .is_some(); + // closure to enforce lazyness + let self_path = || { + sema.resolve_path(&element.parent()?.parent().and_then(ast::Path::cast)?) + }; + if self_param_is_mut + || matches!(self_path(), + Some(hir::PathResolution::Local(local)) + if local.is_self(db) + && (local.is_mut(db) || local.ty(db).is_mutable_reference()) + ) + { + HighlightTag::SelfKeyword | HighlightModifier::Mutable + } else { + HighlightTag::SelfKeyword.into() + } + } + T![ref] => element + .parent() + .and_then(ast::IdentPat::cast) + .and_then(|ident_pat| { + if sema.is_unsafe_ident_pat(&ident_pat) { + Some(HighlightModifier::Unsafe) + } else { + None + } + }) + .map(|modifier| h | modifier) + .unwrap_or(h), + _ => h, + } + } + + _ => return None, + }; + + return Some((highlight, binding_hash)); + + fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 { + fn hash(x: T) -> u64 { + use std::{collections::hash_map::DefaultHasher, hash::Hasher}; + + let mut hasher = DefaultHasher::new(); + x.hash(&mut hasher); + hasher.finish() + } + + hash((name, shadow_count)) + } +} + +fn is_child_of_impl(element: &SyntaxElement) -> bool { + match element.parent() { + Some(e) => e.kind() == IMPL, + _ => false, + } +} + +fn highlight_name( + sema: &Semantics, + db: &RootDatabase, + def: Definition, + name_ref: Option, + possibly_unsafe: bool, +) -> Highlight { + match def { + Definition::Macro(_) => HighlightTag::Macro, + Definition::Field(field) => { + let mut h = HighlightTag::Field.into(); + if possibly_unsafe { + if let VariantDef::Union(_) = field.parent_def(db) { + h |= HighlightModifier::Unsafe; + } + } + + return h; + } + Definition::ModuleDef(def) => match def { + hir::ModuleDef::Module(_) => HighlightTag::Module, + hir::ModuleDef::Function(func) => { + let mut h = HighlightTag::Function.into(); + if func.is_unsafe(db) { + h |= HighlightModifier::Unsafe; + } else { + let is_unsafe = name_ref + .and_then(|name_ref| name_ref.syntax().parent()) + 
.and_then(ast::MethodCallExpr::cast) + .map(|method_call_expr| sema.is_unsafe_method_call(method_call_expr)) + .unwrap_or(false); + if is_unsafe { + h |= HighlightModifier::Unsafe; + } + } + return h; + } + hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HighlightTag::Struct, + hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HighlightTag::Enum, + hir::ModuleDef::Adt(hir::Adt::Union(_)) => HighlightTag::Union, + hir::ModuleDef::EnumVariant(_) => HighlightTag::EnumVariant, + hir::ModuleDef::Const(_) => HighlightTag::Constant, + hir::ModuleDef::Trait(_) => HighlightTag::Trait, + hir::ModuleDef::TypeAlias(_) => HighlightTag::TypeAlias, + hir::ModuleDef::BuiltinType(_) => HighlightTag::BuiltinType, + hir::ModuleDef::Static(s) => { + let mut h = Highlight::new(HighlightTag::Static); + if s.is_mut(db) { + h |= HighlightModifier::Mutable; + h |= HighlightModifier::Unsafe; + } + return h; + } + }, + Definition::SelfType(_) => HighlightTag::SelfType, + Definition::TypeParam(_) => HighlightTag::TypeParam, + Definition::Local(local) => { + let tag = + if local.is_param(db) { HighlightTag::ValueParam } else { HighlightTag::Local }; + let mut h = Highlight::new(tag); + if local.is_mut(db) || local.ty(db).is_mutable_reference() { + h |= HighlightModifier::Mutable; + } + return h; + } + } + .into() +} + +fn highlight_name_by_syntax(name: ast::Name) -> Highlight { + let default = HighlightTag::UnresolvedReference; + + let parent = match name.syntax().parent() { + Some(it) => it, + _ => return default.into(), + }; + + let tag = match parent.kind() { + STRUCT => HighlightTag::Struct, + ENUM => HighlightTag::Enum, + UNION => HighlightTag::Union, + TRAIT => HighlightTag::Trait, + TYPE_ALIAS => HighlightTag::TypeAlias, + TYPE_PARAM => HighlightTag::TypeParam, + RECORD_FIELD => HighlightTag::Field, + MODULE => HighlightTag::Module, + FN => HighlightTag::Function, + CONST => HighlightTag::Constant, + STATIC => HighlightTag::Static, + VARIANT => HighlightTag::EnumVariant, + IDENT_PAT => 
HighlightTag::Local, + _ => default, + }; + + tag.into() +} + +fn highlight_name_ref_by_syntax(name: ast::NameRef, sema: &Semantics) -> Highlight { + let default = HighlightTag::UnresolvedReference; + + let parent = match name.syntax().parent() { + Some(it) => it, + _ => return default.into(), + }; + + match parent.kind() { + METHOD_CALL_EXPR => { + let mut h = Highlight::new(HighlightTag::Function); + let is_unsafe = ast::MethodCallExpr::cast(parent) + .map(|method_call_expr| sema.is_unsafe_method_call(method_call_expr)) + .unwrap_or(false); + if is_unsafe { + h |= HighlightModifier::Unsafe; + } + + h + } + FIELD_EXPR => { + let h = HighlightTag::Field; + let is_union = ast::FieldExpr::cast(parent) + .and_then(|field_expr| { + let field = sema.resolve_field(&field_expr)?; + Some(if let VariantDef::Union(_) = field.parent_def(sema.db) { + true + } else { + false + }) + }) + .unwrap_or(false); + if is_union { + h | HighlightModifier::Unsafe + } else { + h.into() + } + } + PATH_SEGMENT => { + let path = match parent.parent().and_then(ast::Path::cast) { + Some(it) => it, + _ => return default.into(), + }; + let expr = match path.syntax().parent().and_then(ast::PathExpr::cast) { + Some(it) => it, + _ => { + // within path, decide whether it is module or adt by checking for uppercase name + return if name.text().chars().next().unwrap_or_default().is_uppercase() { + HighlightTag::Struct + } else { + HighlightTag::Module + } + .into(); + } + }; + let parent = match expr.syntax().parent() { + Some(it) => it, + None => return default.into(), + }; + + match parent.kind() { + CALL_EXPR => HighlightTag::Function.into(), + _ => if name.text().chars().next().unwrap_or_default().is_uppercase() { + HighlightTag::Struct.into() + } else { + HighlightTag::Constant + } + .into(), + } + } + _ => default.into(), + } +} diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs new file mode 100644 index 000000000..249368ff8 --- /dev/null +++ 
b/crates/ide/src/syntax_highlighting/html.rs @@ -0,0 +1,97 @@ +//! Renders a bit of code as HTML. + +use base_db::SourceDatabase; +use oorandom::Rand32; +use syntax::{AstNode, TextRange, TextSize}; + +use crate::{syntax_highlighting::highlight, FileId, RootDatabase}; + +pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { + let parse = db.parse(file_id); + + fn rainbowify(seed: u64) -> String { + let mut rng = Rand32::new(seed); + format!( + "hsl({h},{s}%,{l}%)", + h = rng.rand_range(0..361), + s = rng.rand_range(42..99), + l = rng.rand_range(40..91), + ) + } + + let ranges = highlight(db, file_id, None, false); + let text = parse.tree().syntax().to_string(); + let mut prev_pos = TextSize::from(0); + let mut buf = String::new(); + buf.push_str(&STYLE); + buf.push_str("
");
+    // Walk the highlight ranges in order, interleaving un-highlighted gap text
+    // (escaped) with a classed <span> for each highlighted chunk.
+    for range in &ranges {
+        if range.range.start() > prev_pos {
+            // Emit the plain text between the previous highlight and this one.
+            let curr = &text[TextRange::new(prev_pos, range.range.start())];
+            let text = html_escape(curr);
+            buf.push_str(&text);
+        }
+        let curr = &text[TextRange::new(range.range.start(), range.range.end())];
+
+        // "tag.modifier.modifier" -> "tag modifier modifier" so each part is a CSS class.
+        let class = range.highlight.to_string().replace('.', " ");
+        let color = match (rainbow, range.binding_hash) {
+            (true, Some(hash)) => {
+                format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
+            }
+            _ => "".into(),
+        };
+        // NOTE(review): the original format string here was "{}" with three arguments,
+        // which cannot compile; the <span> template was evidently lost to HTML
+        // entity-stripping in this patch. Restored from the argument list.
+        buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", class, color, html_escape(curr)));
+
+        prev_pos = range.range.end();
+    }
+    // Add the remaining (non-highlighted) text
+    let curr = &text[TextRange::new(prev_pos, TextSize::of(&text))];
+    let text = html_escape(curr);
+    buf.push_str(&text);
+    buf.push_str("
"); + buf +} + +//FIXME: like, real html escaping +fn html_escape(text: &str) -> String { + text.replace("<", "<").replace(">", ">") +} + +const STYLE: &str = " + +"; diff --git a/crates/ide/src/syntax_highlighting/injection.rs b/crates/ide/src/syntax_highlighting/injection.rs new file mode 100644 index 000000000..43f4e6fea --- /dev/null +++ b/crates/ide/src/syntax_highlighting/injection.rs @@ -0,0 +1,187 @@ +//! Syntax highlighting injections such as highlighting of documentation tests. + +use std::{collections::BTreeMap, convert::TryFrom}; + +use ast::{HasQuotes, HasStringValue}; +use hir::Semantics; +use itertools::Itertools; +use syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize}; + +use crate::{ + call_info::ActiveParameter, Analysis, Highlight, HighlightModifier, HighlightTag, + HighlightedRange, RootDatabase, +}; + +use super::HighlightedRangeStack; + +pub(super) fn highlight_injection( + acc: &mut HighlightedRangeStack, + sema: &Semantics, + literal: ast::RawString, + expanded: SyntaxToken, +) -> Option<()> { + let active_parameter = ActiveParameter::at_token(&sema, expanded)?; + if !active_parameter.name.starts_with("ra_fixture") { + return None; + } + let value = literal.value()?; + let (analysis, tmp_file_id) = Analysis::from_single_file(value.into_owned()); + + if let Some(range) = literal.open_quote_text_range() { + acc.add(HighlightedRange { + range, + highlight: HighlightTag::StringLiteral.into(), + binding_hash: None, + }) + } + + for mut h in analysis.highlight(tmp_file_id).unwrap() { + if let Some(r) = literal.map_range_up(h.range) { + h.range = r; + acc.add(h) + } + } + + if let Some(range) = literal.close_quote_text_range() { + acc.add(HighlightedRange { + range, + highlight: HighlightTag::StringLiteral.into(), + binding_hash: None, + }) + } + + Some(()) +} + +/// Mapping from extracted documentation code to original code +type RangesMap = BTreeMap; + +const RUSTDOC_FENCE: &'static str = "```"; +const RUSTDOC_FENCE_TOKENS: 
&[&'static str] = + &["", "rust", "should_panic", "ignore", "no_run", "compile_fail", "edition2015", "edition2018"]; + +/// Extracts Rust code from documentation comments as well as a mapping from +/// the extracted source code back to the original source ranges. +/// Lastly, a vector of new comment highlight ranges (spanning only the +/// comment prefix) is returned which is used in the syntax highlighting +/// injection to replace the previous (line-spanning) comment ranges. +pub(super) fn extract_doc_comments( + node: &SyntaxNode, +) -> Option<(String, RangesMap, Vec)> { + // wrap the doctest into function body to get correct syntax highlighting + let prefix = "fn doctest() {\n"; + let suffix = "}\n"; + // Mapping from extracted documentation code to original code + let mut range_mapping: RangesMap = BTreeMap::new(); + let mut line_start = TextSize::try_from(prefix.len()).unwrap(); + let mut is_codeblock = false; + let mut is_doctest = false; + // Replace the original, line-spanning comment ranges by new, only comment-prefix + // spanning comment ranges. 
+ let mut new_comments = Vec::new(); + let doctest = node + .children_with_tokens() + .filter_map(|el| el.into_token().and_then(ast::Comment::cast)) + .filter(|comment| comment.kind().doc.is_some()) + .filter(|comment| { + if let Some(idx) = comment.text().find(RUSTDOC_FENCE) { + is_codeblock = !is_codeblock; + // Check whether code is rust by inspecting fence guards + let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..]; + let is_rust = + guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim())); + is_doctest = is_codeblock && is_rust; + false + } else { + is_doctest + } + }) + .map(|comment| { + let prefix_len = comment.prefix().len(); + let line: &str = comment.text().as_str(); + let range = comment.syntax().text_range(); + + // whitespace after comment is ignored + let pos = if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) { + prefix_len + ws.len_utf8() + } else { + prefix_len + }; + + // lines marked with `#` should be ignored in output, we skip the `#` char + let pos = if let Some(ws) = line.chars().nth(pos).filter(|&c| c == '#') { + pos + ws.len_utf8() + } else { + pos + }; + + range_mapping.insert(line_start, range.start() + TextSize::try_from(pos).unwrap()); + new_comments.push(HighlightedRange { + range: TextRange::new( + range.start(), + range.start() + TextSize::try_from(pos).unwrap(), + ), + highlight: HighlightTag::Comment | HighlightModifier::Documentation, + binding_hash: None, + }); + line_start += range.len() - TextSize::try_from(pos).unwrap(); + line_start += TextSize::try_from('\n'.len_utf8()).unwrap(); + + line[pos..].to_owned() + }) + .join("\n"); + + if doctest.is_empty() { + return None; + } + + let doctest = format!("{}{}{}", prefix, doctest, suffix); + Some((doctest, range_mapping, new_comments)) +} + +/// Injection of syntax highlighting of doctests. 
+pub(super) fn highlight_doc_comment( + text: String, + range_mapping: RangesMap, + new_comments: Vec, + stack: &mut HighlightedRangeStack, +) { + let (analysis, tmp_file_id) = Analysis::from_single_file(text); + + stack.push(); + for mut h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() { + // Determine start offset and end offset in case of multi-line ranges + let mut start_offset = None; + let mut end_offset = None; + for (line_start, orig_line_start) in range_mapping.range(..h.range.end()).rev() { + // It's possible for orig_line_start - line_start to be negative. Add h.range.start() + // here and remove it from the end range after the loop below so that the values are + // always non-negative. + let offset = h.range.start() + orig_line_start - line_start; + if line_start <= &h.range.start() { + start_offset.get_or_insert(offset); + break; + } else { + end_offset.get_or_insert(offset); + } + } + if let Some(start_offset) = start_offset { + h.range = TextRange::new( + start_offset, + h.range.end() + end_offset.unwrap_or(start_offset) - h.range.start(), + ); + + h.highlight |= HighlightModifier::Injected; + stack.add(h); + } + } + + // Inject the comment prefix highlight ranges + stack.push(); + for comment in new_comments { + stack.add(comment); + } + stack.pop_and_inject(None); + stack + .pop_and_inject(Some(Highlight::from(HighlightTag::Generic) | HighlightModifier::Injected)); +} diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs new file mode 100644 index 000000000..49ec94bdc --- /dev/null +++ b/crates/ide/src/syntax_highlighting/tags.rs @@ -0,0 +1,203 @@ +//! Defines token tags we use for syntax highlighting. +//! A tag is not unlike a CSS class. 
+ +use std::{fmt, ops}; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct Highlight { + pub tag: HighlightTag, + pub modifiers: HighlightModifiers, +} + +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct HighlightModifiers(u32); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum HighlightTag { + Attribute, + BoolLiteral, + BuiltinType, + ByteLiteral, + CharLiteral, + Comment, + Constant, + Enum, + EnumVariant, + EscapeSequence, + Field, + Function, + Generic, + Keyword, + Lifetime, + Macro, + Module, + NumericLiteral, + Punctuation, + SelfKeyword, + SelfType, + Static, + StringLiteral, + Struct, + Trait, + TypeAlias, + TypeParam, + Union, + ValueParam, + Local, + UnresolvedReference, + FormatSpecifier, + Operator, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[repr(u8)] +pub enum HighlightModifier { + /// Used to differentiate individual elements within attributes. + Attribute = 0, + /// Used with keywords like `if` and `break`. + ControlFlow, + /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is + /// not. 
+ Definition, + Documentation, + Injected, + Mutable, + Unsafe, +} + +impl HighlightTag { + fn as_str(self) -> &'static str { + match self { + HighlightTag::Attribute => "attribute", + HighlightTag::BoolLiteral => "bool_literal", + HighlightTag::BuiltinType => "builtin_type", + HighlightTag::ByteLiteral => "byte_literal", + HighlightTag::CharLiteral => "char_literal", + HighlightTag::Comment => "comment", + HighlightTag::Constant => "constant", + HighlightTag::Enum => "enum", + HighlightTag::EnumVariant => "enum_variant", + HighlightTag::EscapeSequence => "escape_sequence", + HighlightTag::Field => "field", + HighlightTag::FormatSpecifier => "format_specifier", + HighlightTag::Function => "function", + HighlightTag::Generic => "generic", + HighlightTag::Keyword => "keyword", + HighlightTag::Lifetime => "lifetime", + HighlightTag::Punctuation => "punctuation", + HighlightTag::Macro => "macro", + HighlightTag::Module => "module", + HighlightTag::NumericLiteral => "numeric_literal", + HighlightTag::Operator => "operator", + HighlightTag::SelfKeyword => "self_keyword", + HighlightTag::SelfType => "self_type", + HighlightTag::Static => "static", + HighlightTag::StringLiteral => "string_literal", + HighlightTag::Struct => "struct", + HighlightTag::Trait => "trait", + HighlightTag::TypeAlias => "type_alias", + HighlightTag::TypeParam => "type_param", + HighlightTag::Union => "union", + HighlightTag::ValueParam => "value_param", + HighlightTag::Local => "variable", + HighlightTag::UnresolvedReference => "unresolved_reference", + } + } +} + +impl fmt::Display for HighlightTag { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self.as_str(), f) + } +} + +impl HighlightModifier { + const ALL: &'static [HighlightModifier] = &[ + HighlightModifier::Attribute, + HighlightModifier::ControlFlow, + HighlightModifier::Definition, + HighlightModifier::Documentation, + HighlightModifier::Injected, + HighlightModifier::Mutable, + 
HighlightModifier::Unsafe, + ]; + + fn as_str(self) -> &'static str { + match self { + HighlightModifier::Attribute => "attribute", + HighlightModifier::ControlFlow => "control", + HighlightModifier::Definition => "declaration", + HighlightModifier::Documentation => "documentation", + HighlightModifier::Injected => "injected", + HighlightModifier::Mutable => "mutable", + HighlightModifier::Unsafe => "unsafe", + } + } + + fn mask(self) -> u32 { + 1 << (self as u32) + } +} + +impl fmt::Display for HighlightModifier { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self.as_str(), f) + } +} + +impl fmt::Display for Highlight { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.tag)?; + for modifier in self.modifiers.iter() { + write!(f, ".{}", modifier)? + } + Ok(()) + } +} + +impl From for Highlight { + fn from(tag: HighlightTag) -> Highlight { + Highlight::new(tag) + } +} + +impl Highlight { + pub(crate) fn new(tag: HighlightTag) -> Highlight { + Highlight { tag, modifiers: HighlightModifiers::default() } + } +} + +impl ops::BitOr for HighlightTag { + type Output = Highlight; + + fn bitor(self, rhs: HighlightModifier) -> Highlight { + Highlight::new(self) | rhs + } +} + +impl ops::BitOrAssign for HighlightModifiers { + fn bitor_assign(&mut self, rhs: HighlightModifier) { + self.0 |= rhs.mask(); + } +} + +impl ops::BitOrAssign for Highlight { + fn bitor_assign(&mut self, rhs: HighlightModifier) { + self.modifiers |= rhs; + } +} + +impl ops::BitOr for Highlight { + type Output = Highlight; + + fn bitor(mut self, rhs: HighlightModifier) -> Highlight { + self |= rhs; + self + } +} + +impl HighlightModifiers { + pub fn iter(self) -> impl Iterator { + HighlightModifier::ALL.iter().copied().filter(move |it| self.0 & it.mask() == it.mask()) + } +} diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs new file mode 100644 index 000000000..94f37d773 --- /dev/null 
+++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -0,0 +1,445 @@ +use std::fs; + +use expect::{expect_file, ExpectFile}; +use test_utils::project_dir; + +use crate::{mock_analysis::single_file, FileRange, TextRange}; + +#[test] +fn test_highlighting() { + check_highlighting( + r#" +use inner::{self as inner_mod}; +mod inner {} + +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} + +trait Bar { + fn bar(&self) -> i32; +} + +impl Bar for Foo { + fn bar(&self) -> i32 { + self.x + } +} + +impl Foo { + fn baz(mut self) -> i32 { + self.x + } + + fn qux(&mut self) { + self.x = 0; + } +} + +static mut STATIC_MUT: i32 = 0; + +fn foo<'a, T>() -> T { + foo::<'a, i32>() +} + +macro_rules! def_fn { + ($($tt:tt)*) => {$($tt)*} +} + +def_fn! { + fn bar() -> u32 { + 100 + } +} + +macro_rules! noop { + ($expr:expr) => { + $expr + } +} + +// comment +fn main() { + println!("Hello, {}!", 92); + + let mut vec = Vec::new(); + if true { + let x = 92; + vec.push(Foo { x, y: 1 }); + } + unsafe { + vec.set_len(0); + STATIC_MUT = 1; + } + + for e in vec { + // Do nothing + } + + noop!(noop!(1)); + + let mut x = 42; + let y = &mut x; + let z = &y; + + let Foo { x: z, y } = Foo { x: z, y }; + + y; +} + +enum Option { + Some(T), + None, +} +use Option::*; + +impl Option { + fn and(self, other: Option) -> Option<(T, U)> { + match other { + None => unimplemented!(), + Nope => Nope, + } + } +} +"# + .trim(), + expect_file!["crates/ide/test_data/highlighting.html"], + false, + ); +} + +#[test] +fn test_rainbow_highlighting() { + check_highlighting( + r#" +fn main() { + let hello = "hello"; + let x = hello.to_string(); + let y = hello.to_string(); + + let x = "other color please!"; + let y = x.to_string(); +} + +fn bar() { + let mut hello = "hello"; +} +"# + .trim(), + expect_file!["crates/ide/test_data/rainbow_highlighting.html"], + true, + ); +} + +#[test] +fn accidentally_quadratic() { + let file = project_dir().join("crates/syntax/test_data/accidentally_quadratic"); + let 
src = fs::read_to_string(file).unwrap(); + + let (analysis, file_id) = single_file(&src); + + // let t = std::time::Instant::now(); + let _ = analysis.highlight(file_id).unwrap(); + // eprintln!("elapsed: {:?}", t.elapsed()); +} + +#[test] +fn test_ranges() { + let (analysis, file_id) = single_file( + r#" +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} +"#, + ); + + // The "x" + let highlights = &analysis + .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) + .unwrap(); + + assert_eq!(&highlights[0].highlight.to_string(), "field.declaration"); +} + +#[test] +fn test_flattening() { + check_highlighting( + r##" +fn fixture(ra_fixture: &str) {} + +fn main() { + fixture(r#" + trait Foo { + fn foo() { + println!("2 + 2 = {}", 4); + } + }"# + ); +}"## + .trim(), + expect_file!["crates/ide/test_data/highlight_injection.html"], + false, + ); +} + +#[test] +fn ranges_sorted() { + let (analysis, file_id) = single_file( + r#" +#[foo(bar = "bar")] +macro_rules! test {} +}"# + .trim(), + ); + let _ = analysis.highlight(file_id).unwrap(); +} + +#[test] +fn test_string_highlighting() { + // The format string detection is based on macro-expansion, + // thus, we have to copy the macro definition from `std` + check_highlighting( + r#" +macro_rules! println { + ($($arg:tt)*) => ({ + $crate::io::_print($crate::format_args_nl!($($arg)*)); + }) +} +#[rustc_builtin_macro] +macro_rules! format_args_nl { + ($fmt:expr) => {{ /* compiler built-in */ }}; + ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; +} + +fn main() { + // from https://doc.rust-lang.org/std/fmt/index.html + println!("Hello"); // => "Hello" + println!("Hello, {}!", "world"); // => "Hello, world!" 
+ println!("The number is {}", 1); // => "The number is 1" + println!("{:?}", (3, 4)); // => "(3, 4)" + println!("{value}", value=4); // => "4" + println!("{} {}", 1, 2); // => "1 2" + println!("{:04}", 42); // => "0042" with leading zerosV + println!("{1} {} {0} {}", 1, 2); // => "2 1 1 2" + println!("{argument}", argument = "test"); // => "test" + println!("{name} {}", 1, name = 2); // => "2 1" + println!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b" + println!("{{{}}}", 2); // => "{2}" + println!("Hello {:5}!", "x"); + println!("Hello {:1$}!", "x", 5); + println!("Hello {1:0$}!", 5, "x"); + println!("Hello {:width$}!", "x", width = 5); + println!("Hello {:<5}!", "x"); + println!("Hello {:-<5}!", "x"); + println!("Hello {:^5}!", "x"); + println!("Hello {:>5}!", "x"); + println!("Hello {:+}!", 5); + println!("{:#x}!", 27); + println!("Hello {:05}!", 5); + println!("Hello {:05}!", -5); + println!("{:#010x}!", 27); + println!("Hello {0} is {1:.5}", "x", 0.01); + println!("Hello {1} is {2:.0$}", 5, "x", 0.01); + println!("Hello {0} is {2:.1$}", "x", 5, 0.01); + println!("Hello {} is {:.*}", "x", 5, 0.01); + println!("Hello {} is {2:.*}", "x", 5, 0.01); + println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01); + println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56); + println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56"); + println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56"); + println!("Hello {{}}"); + println!("{{ Hello"); + + println!(r"Hello, {}!", "world"); + + // escape sequences + println!("Hello\nWorld"); + println!("\u{48}\x65\x6C\x6C\x6F World"); + + println!("{\x41}", A = 92); + println!("{ничоси}", ничоси = 92); +}"# + .trim(), + expect_file!["crates/ide/test_data/highlight_strings.html"], + false, + ); +} + +#[test] +fn test_unsafe_highlighting() { + check_highlighting( + r#" +unsafe fn unsafe_fn() {} + +union Union { + a: u32, + b: f32, +} + +struct 
HasUnsafeFn; + +impl HasUnsafeFn { + unsafe fn unsafe_method(&self) {} +} + +struct TypeForStaticMut { + a: u8 +} + +static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 }; + +#[repr(packed)] +struct Packed { + a: u16, +} + +trait DoTheAutoref { + fn calls_autoref(&self); +} + +impl DoTheAutoref for u16 { + fn calls_autoref(&self) {} +} + +fn main() { + let x = &5 as *const _ as *const usize; + let u = Union { b: 0 }; + unsafe { + // unsafe fn and method calls + unsafe_fn(); + let b = u.b; + match u { + Union { b: 0 } => (), + Union { a } => (), + } + HasUnsafeFn.unsafe_method(); + + // unsafe deref + let y = *x; + + // unsafe access to a static mut + let a = global_mut.a; + + // unsafe ref of packed fields + let packed = Packed { a: 0 }; + let a = &packed.a; + let ref a = packed.a; + let Packed { ref a } = packed; + let Packed { a: ref _a } = packed; + + // unsafe auto ref of packed field + packed.a.calls_autoref(); + } +} +"# + .trim(), + expect_file!["crates/ide/test_data/highlight_unsafe.html"], + false, + ); +} + +#[test] +fn test_highlight_doctest() { + check_highlighting( + r#" +/// ``` +/// let _ = "early doctests should not go boom"; +/// ``` +struct Foo { + bar: bool, +} + +impl Foo { + pub const bar: bool = true; + + /// Constructs a new `Foo`. + /// + /// # Examples + /// + /// ``` + /// # #![allow(unused_mut)] + /// let mut foo: Foo = Foo::new(); + /// ``` + pub const fn new() -> Foo { + Foo { bar: true } + } + + /// `bar` method on `Foo`. 
+ /// + /// # Examples + /// + /// ``` + /// use x::y; + /// + /// let foo = Foo::new(); + /// + /// // calls bar on foo + /// assert!(foo.bar()); + /// + /// let bar = foo.bar || Foo::bar; + /// + /// /* multi-line + /// comment */ + /// + /// let multi_line_string = "Foo + /// bar + /// "; + /// + /// ``` + /// + /// ```rust,no_run + /// let foobar = Foo::new().bar(); + /// ``` + /// + /// ```sh + /// echo 1 + /// ``` + pub fn foo(&self) -> bool { + true + } +} + +/// ``` +/// noop!(1); +/// ``` +macro_rules! noop { + ($expr:expr) => { + $expr + } +} +"# + .trim(), + expect_file!["crates/ide/test_data/highlight_doctest.html"], + false, + ); +} + +#[test] +fn test_extern_crate() { + check_highlighting( + r#" + //- /main.rs + extern crate std; + extern crate alloc as abc; + //- /std/lib.rs + pub struct S; + //- /alloc/lib.rs + pub struct A + "#, + expect_file!["crates/ide/test_data/highlight_extern_crate.html"], + false, + ); +} + +/// Highlights the code given by the `ra_fixture` argument, renders the +/// result as HTML, and compares it with the HTML file given as `snapshot`. +/// Note that the `snapshot` file is overwritten by the rendered HTML. +fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) { + let (analysis, file_id) = single_file(ra_fixture); + let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); + expect.assert_eq(actual_html) +} diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs new file mode 100644 index 000000000..f80044959 --- /dev/null +++ b/crates/ide/src/syntax_tree.rs @@ -0,0 +1,359 @@ +use base_db::{FileId, SourceDatabase}; +use ide_db::RootDatabase; +use syntax::{ + algo, AstNode, NodeOrToken, SourceFile, + SyntaxKind::{RAW_STRING, STRING}, + SyntaxToken, TextRange, TextSize, +}; + +// Feature: Show Syntax Tree +// +// Shows the parse tree of the current file. It exists mostly for debugging +// rust-analyzer itself. 
+// +// |=== +// | Editor | Action Name +// +// | VS Code | **Rust Analyzer: Show Syntax Tree** +// |=== +pub(crate) fn syntax_tree( + db: &RootDatabase, + file_id: FileId, + text_range: Option, +) -> String { + let parse = db.parse(file_id); + if let Some(text_range) = text_range { + let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => { + if let Some(tree) = syntax_tree_for_string(&token, text_range) { + return tree; + } + token.parent() + } + }; + + format!("{:#?}", node) + } else { + format!("{:#?}", parse.tree().syntax()) + } +} + +/// Attempts parsing the selected contents of a string literal +/// as rust syntax and returns its syntax tree +fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option { + // When the range is inside a string + // we'll attempt parsing it as rust syntax + // to provide the syntax tree of the contents of the string + match token.kind() { + STRING | RAW_STRING => syntax_tree_for_token(token, text_range), + _ => None, + } +} + +fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option { + // Range of the full node + let node_range = node.text_range(); + let text = node.text().to_string(); + + // We start at some point inside the node + // Either we have selected the whole string + // or our selection is inside it + let start = text_range.start() - node_range.start(); + + // how many characters we have selected + let len = text_range.len(); + + let node_len = node_range.len(); + + let start = start; + + // We want to cap our length + let len = len.min(node_len); + + // Ensure our slice is inside the actual string + let end = + if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start }; + + let text = &text[TextRange::new(start, end)]; + + // Remove possible extra string quotes from the start + // and the end of the string + let text = text + .trim_start_matches('r') + 
.trim_start_matches('#') + .trim_start_matches('"') + .trim_end_matches('#') + .trim_end_matches('"') + .trim() + // Remove custom markers + .replace("<|>", ""); + + let parsed = SourceFile::parse(&text); + + // If the "file" parsed without errors, + // return its syntax + if parsed.errors().is_empty() { + return Some(format!("{:#?}", parsed.tree().syntax())); + } + + None +} + +#[cfg(test)] +mod tests { + use test_utils::assert_eq_text; + + use crate::mock_analysis::{analysis_and_range, single_file}; + + #[test] + fn test_syntax_tree_without_range() { + // Basic syntax + let (analysis, file_id) = single_file(r#"fn foo() {}"#); + let syn = analysis.syntax_tree(file_id, None).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..11 + FN@0..11 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..11 + L_CURLY@9..10 "{" + R_CURLY@10..11 "}" +"# + .trim() + ); + + let (analysis, file_id) = single_file( + r#" +fn test() { + assert!(" + fn foo() { + } + ", ""); +}"# + .trim(), + ); + let syn = analysis.syntax_tree(file_id, None).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..60 + FN@0..60 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..7 + IDENT@3..7 "test" + PARAM_LIST@7..9 + L_PAREN@7..8 "(" + R_PAREN@8..9 ")" + WHITESPACE@9..10 " " + BLOCK_EXPR@10..60 + L_CURLY@10..11 "{" + WHITESPACE@11..16 "\n " + EXPR_STMT@16..58 + MACRO_CALL@16..57 + PATH@16..22 + PATH_SEGMENT@16..22 + NAME_REF@16..22 + IDENT@16..22 "assert" + BANG@22..23 "!" + TOKEN_TREE@23..57 + L_PAREN@23..24 "(" + STRING@24..52 "\"\n fn foo() {\n ..." 
+ COMMA@52..53 "," + WHITESPACE@53..54 " " + STRING@54..56 "\"\"" + R_PAREN@56..57 ")" + SEMICOLON@57..58 ";" + WHITESPACE@58..59 "\n" + R_CURLY@59..60 "}" +"# + .trim() + ); + } + + #[test] + fn test_syntax_tree_with_range() { + let (analysis, range) = analysis_and_range(r#"<|>fn foo() {}<|>"#.trim()); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +FN@0..11 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..11 + L_CURLY@9..10 "{" + R_CURLY@10..11 "}" +"# + .trim() + ); + + let (analysis, range) = analysis_and_range( + r#"fn test() { + <|>assert!(" + fn foo() { + } + ", "");<|> +}"# + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +EXPR_STMT@16..58 + MACRO_CALL@16..57 + PATH@16..22 + PATH_SEGMENT@16..22 + NAME_REF@16..22 + IDENT@16..22 "assert" + BANG@22..23 "!" + TOKEN_TREE@23..57 + L_PAREN@23..24 "(" + STRING@24..52 "\"\n fn foo() {\n ..." 
+ COMMA@52..53 "," + WHITESPACE@53..54 " " + STRING@54..56 "\"\"" + R_PAREN@56..57 ")" + SEMICOLON@57..58 ";" +"# + .trim() + ); + } + + #[test] + fn test_syntax_tree_inside_string() { + let (analysis, range) = analysis_and_range( + r#"fn test() { + assert!(" +<|>fn foo() { +}<|> +fn bar() { +} + ", ""); +}"# + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..12 + FN@0..12 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..12 + L_CURLY@9..10 "{" + WHITESPACE@10..11 "\n" + R_CURLY@11..12 "}" +"# + .trim() + ); + + // With a raw string + let (analysis, range) = analysis_and_range( + r###"fn test() { + assert!(r#" +<|>fn foo() { +}<|> +fn bar() { +} + "#, ""); +}"### + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..12 + FN@0..12 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..12 + L_CURLY@9..10 "{" + WHITESPACE@10..11 "\n" + R_CURLY@11..12 "}" +"# + .trim() + ); + + // With a raw string + let (analysis, range) = analysis_and_range( + r###"fn test() { + assert!(r<|>#" +fn foo() { +} +fn bar() { +}"<|>#, ""); +}"### + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@0..25 + FN@0..12 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..12 + L_CURLY@9..10 "{" + WHITESPACE@10..11 "\n" + R_CURLY@11..12 "}" + WHITESPACE@12..13 "\n" + FN@13..25 + FN_KW@13..15 "fn" + WHITESPACE@15..16 " " + NAME@16..19 + IDENT@16..19 "bar" + PARAM_LIST@19..21 + 
L_PAREN@19..20 "(" + R_PAREN@20..21 ")" + WHITESPACE@21..22 " " + BLOCK_EXPR@22..25 + L_CURLY@22..23 "{" + WHITESPACE@23..24 "\n" + R_CURLY@24..25 "}" +"# + .trim() + ); + } +} diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs new file mode 100644 index 000000000..899ce5f26 --- /dev/null +++ b/crates/ide/src/typing.rs @@ -0,0 +1,364 @@ +//! This module handles auto-magic editing actions applied together with users +//! edits. For example, if the user typed +//! +//! ```text +//! foo +//! .bar() +//! .baz() +//! | // <- cursor is here +//! ``` +//! +//! and types `.` next, we want to indent the dot. +//! +//! Language server executes such typing assists synchronously. That is, they +//! block user's typing and should be pretty fast for this reason! + +mod on_enter; + +use base_db::{FilePosition, SourceDatabase}; +use ide_db::{source_change::SourceFileEdit, RootDatabase}; +use syntax::{ + algo::find_node_at_offset, + ast::{self, edit::IndentLevel, AstToken}, + AstNode, SourceFile, + SyntaxKind::{FIELD_EXPR, METHOD_CALL_EXPR}, + TextRange, TextSize, +}; + +use text_edit::TextEdit; + +use crate::SourceChange; + +pub(crate) use on_enter::on_enter; + +pub(crate) const TRIGGER_CHARS: &str = ".=>"; + +// Feature: On Typing Assists +// +// Some features trigger on typing certain characters: +// +// - typing `let =` tries to smartly add `;` if `=` is followed by an existing expression +// - typing `.` in a chain method call auto-indents +pub(crate) fn on_char_typed( + db: &RootDatabase, + position: FilePosition, + char_typed: char, +) -> Option { + assert!(TRIGGER_CHARS.contains(char_typed)); + let file = &db.parse(position.file_id).tree(); + assert_eq!(file.syntax().text().char_at(position.offset), Some(char_typed)); + let edit = on_char_typed_inner(file, position.offset, char_typed)?; + Some(SourceFileEdit { file_id: position.file_id, edit }.into()) +} + +fn on_char_typed_inner(file: &SourceFile, offset: TextSize, char_typed: char) -> Option { + 
assert!(TRIGGER_CHARS.contains(char_typed)); + match char_typed { + '.' => on_dot_typed(file, offset), + '=' => on_eq_typed(file, offset), + '>' => on_arrow_typed(file, offset), + _ => unreachable!(), + } +} + +/// Returns an edit which should be applied after `=` was typed. Primarily, +/// this works when adding `let =`. +// FIXME: use a snippet completion instead of this hack here. +fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { + assert_eq!(file.syntax().text().char_at(offset), Some('=')); + let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; + if let_stmt.semicolon_token().is_some() { + return None; + } + if let Some(expr) = let_stmt.initializer() { + let expr_range = expr.syntax().text_range(); + if expr_range.contains(offset) && offset != expr_range.start() { + return None; + } + if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') { + return None; + } + } else { + return None; + } + let offset = let_stmt.syntax().text_range().end(); + Some(TextEdit::insert(offset, ";".to_string())) +} + +/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. +fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option { + assert_eq!(file.syntax().text().char_at(offset), Some('.')); + let whitespace = + file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?; + + let current_indent = { + let text = whitespace.text(); + let newline = text.rfind('\n')?; + &text[newline + 1..] 
+ }; + let current_indent_len = TextSize::of(current_indent); + + let parent = whitespace.syntax().parent(); + // Make sure dot is a part of call chain + if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) { + return None; + } + let prev_indent = IndentLevel::from_node(&parent); + let target_indent = format!(" {}", prev_indent); + let target_indent_len = TextSize::of(&target_indent); + if current_indent_len == target_indent_len { + return None; + } + + Some(TextEdit::replace(TextRange::new(offset - current_indent_len, offset), target_indent)) +} + +/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }` +fn on_arrow_typed(file: &SourceFile, offset: TextSize) -> Option { + let file_text = file.syntax().text(); + assert_eq!(file_text.char_at(offset), Some('>')); + let after_arrow = offset + TextSize::of('>'); + if file_text.char_at(after_arrow) != Some('{') { + return None; + } + if find_node_at_offset::(file.syntax(), offset).is_none() { + return None; + } + + Some(TextEdit::insert(after_arrow, " ".to_string())) +} + +#[cfg(test)] +mod tests { + use test_utils::{assert_eq_text, extract_offset}; + + use super::*; + + fn do_type_char(char_typed: char, before: &str) -> Option { + let (offset, before) = extract_offset(before); + let edit = TextEdit::insert(offset, char_typed.to_string()); + let mut before = before.to_string(); + edit.apply(&mut before); + let parse = SourceFile::parse(&before); + on_char_typed_inner(&parse.tree(), offset, char_typed).map(|it| { + it.apply(&mut before); + before.to_string() + }) + } + + fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) { + let actual = do_type_char(char_typed, ra_fixture_before) + .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed)); + + assert_eq_text!(ra_fixture_after, &actual); + } + + fn type_char_noop(char_typed: char, before: &str) { + let file_change = do_type_char(char_typed, before); + assert!(file_change.is_none()) + } + + 
#[test] + fn test_on_eq_typed() { + // do_check(r" + // fn foo() { + // let foo =<|> + // } + // ", r" + // fn foo() { + // let foo =; + // } + // "); + type_char( + '=', + r" +fn foo() { + let foo <|> 1 + 1 +} +", + r" +fn foo() { + let foo = 1 + 1; +} +", + ); + // do_check(r" + // fn foo() { + // let foo =<|> + // let bar = 1; + // } + // ", r" + // fn foo() { + // let foo =; + // let bar = 1; + // } + // "); + } + + #[test] + fn indents_new_chain_call() { + type_char( + '.', + r" + fn main() { + xs.foo() + <|> + } + ", + r" + fn main() { + xs.foo() + . + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + xs.foo() + <|> + } + ", + ) + } + + #[test] + fn indents_new_chain_call_with_semi() { + type_char( + '.', + r" + fn main() { + xs.foo() + <|>; + } + ", + r" + fn main() { + xs.foo() + .; + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + xs.foo() + <|>; + } + ", + ) + } + + #[test] + fn indents_new_chain_call_with_let() { + type_char( + '.', + r#" +fn main() { + let _ = foo + <|> + bar() +} +"#, + r#" +fn main() { + let _ = foo + . + bar() +} +"#, + ); + } + + #[test] + fn indents_continued_chain_call() { + type_char( + '.', + r" + fn main() { + xs.foo() + .first() + <|> + } + ", + r" + fn main() { + xs.foo() + .first() + . + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + xs.foo() + .first() + <|> + } + ", + ); + } + + #[test] + fn indents_middle_of_chain_call() { + type_char( + '.', + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + <|> + .nth(92) + .unwrap(); + } + ", + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + . 
+ .nth(92) + .unwrap(); + } + ", + ); + type_char_noop( + '.', + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + <|> + .nth(92) + .unwrap(); + } + ", + ); + } + + #[test] + fn dont_indent_freestanding_dot() { + type_char_noop( + '.', + r" + fn main() { + <|> + } + ", + ); + type_char_noop( + '.', + r" + fn main() { + <|> + } + ", + ); + } + + #[test] + fn adds_space_after_return_type() { + type_char('>', "fn foo() -<|>{ 92 }", "fn foo() -> { 92 }") + } +} diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs new file mode 100644 index 000000000..f7d46146c --- /dev/null +++ b/crates/ide/src/typing/on_enter.rs @@ -0,0 +1,256 @@ +//! Handles the `Enter` key press. At the momently, this only continues +//! comments, but should handle indent some time in the future as well. + +use base_db::{FilePosition, SourceDatabase}; +use ide_db::RootDatabase; +use syntax::{ + ast::{self, AstToken}, + AstNode, SmolStr, SourceFile, + SyntaxKind::*, + SyntaxToken, TextRange, TextSize, TokenAtOffset, +}; +use test_utils::mark; +use text_edit::TextEdit; + +// Feature: On Enter +// +// rust-analyzer can override kbd:[Enter] key to make it smarter: +// +// - kbd:[Enter] inside triple-slash comments automatically inserts `///` +// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//` +// +// This action needs to be assigned to shortcut explicitly. 
+// +// VS Code:: +// +// Add the following to `keybindings.json`: +// [source,json] +// ---- +// { +// "key": "Enter", +// "command": "rust-analyzer.onEnter", +// "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust" +// } +// ---- +pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option { + let parse = db.parse(position.file_id); + let file = parse.tree(); + let comment = file + .syntax() + .token_at_offset(position.offset) + .left_biased() + .and_then(ast::Comment::cast)?; + + if comment.kind().shape.is_block() { + return None; + } + + let prefix = comment.prefix(); + let comment_range = comment.syntax().text_range(); + if position.offset < comment_range.start() + TextSize::of(prefix) { + return None; + } + + let mut remove_last_space = false; + // Continuing single-line non-doc comments (like this one :) ) is annoying + if prefix == "//" && comment_range.end() == position.offset { + if comment.text().ends_with(' ') { + mark::hit!(continues_end_of_line_comment_with_space); + remove_last_space = true; + } else if !followed_by_comment(&comment) { + return None; + } + } + + let indent = node_indent(&file, comment.syntax())?; + let inserted = format!("\n{}{} $0", indent, prefix); + let delete = if remove_last_space { + TextRange::new(position.offset - TextSize::of(' '), position.offset) + } else { + TextRange::empty(position.offset) + }; + let edit = TextEdit::replace(delete, inserted); + Some(edit) +} + +fn followed_by_comment(comment: &ast::Comment) -> bool { + let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) { + Some(it) => it, + None => return false, + }; + if ws.spans_multiple_lines() { + return false; + } + ws.syntax().next_token().and_then(ast::Comment::cast).is_some() +} + +fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option { + let ws = match file.syntax().token_at_offset(token.text_range().start()) { + TokenAtOffset::Between(l, r) => { + assert!(r == *token); + l + } + 
TokenAtOffset::Single(n) => { + assert!(n == *token); + return Some("".into()); + } + TokenAtOffset::None => unreachable!(), + }; + if ws.kind() != WHITESPACE { + return None; + } + let text = ws.text(); + let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); + Some(text[pos..].into()) +} + +#[cfg(test)] +mod tests { + use stdx::trim_indent; + use test_utils::{assert_eq_text, mark}; + + use crate::mock_analysis::analysis_and_position; + + fn apply_on_enter(before: &str) -> Option { + let (analysis, position) = analysis_and_position(&before); + let result = analysis.on_enter(position).unwrap()?; + + let mut actual = analysis.file_text(position.file_id).unwrap().to_string(); + result.apply(&mut actual); + Some(actual) + } + + fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) { + let ra_fixture_after = &trim_indent(ra_fixture_after); + let actual = apply_on_enter(ra_fixture_before).unwrap(); + assert_eq_text!(ra_fixture_after, &actual); + } + + fn do_check_noop(ra_fixture_text: &str) { + assert!(apply_on_enter(ra_fixture_text).is_none()) + } + + #[test] + fn continues_doc_comment() { + do_check( + r" +/// Some docs<|> +fn foo() { +} +", + r" +/// Some docs +/// $0 +fn foo() { +} +", + ); + + do_check( + r" +impl S { + /// Some<|> docs. + fn foo() {} +} +", + r" +impl S { + /// Some + /// $0 docs. + fn foo() {} +} +", + ); + + do_check( + r" +///<|> Some docs +fn foo() { +} +", + r" +/// +/// $0 Some docs +fn foo() { +} +", + ); + } + + #[test] + fn does_not_continue_before_doc_comment() { + do_check_noop(r"<|>//! 
docz"); + } + + #[test] + fn continues_code_comment_in_the_middle_of_line() { + do_check( + r" +fn main() { + // Fix<|> me + let x = 1 + 1; +} +", + r" +fn main() { + // Fix + // $0 me + let x = 1 + 1; +} +", + ); + } + + #[test] + fn continues_code_comment_in_the_middle_several_lines() { + do_check( + r" +fn main() { + // Fix<|> + // me + let x = 1 + 1; +} +", + r" +fn main() { + // Fix + // $0 + // me + let x = 1 + 1; +} +", + ); + } + + #[test] + fn does_not_continue_end_of_line_comment() { + do_check_noop( + r" +fn main() { + // Fix me<|> + let x = 1 + 1; +} +", + ); + } + + #[test] + fn continues_end_of_line_comment_with_space() { + mark::check!(continues_end_of_line_comment_with_space); + do_check( + r#" +fn main() { + // Fix me <|> + let x = 1 + 1; +} +"#, + r#" +fn main() { + // Fix me + // $0 + let x = 1 + 1; +} +"#, + ); + } +} diff --git a/crates/ide/test_data/highlight_doctest.html b/crates/ide/test_data/highlight_doctest.html new file mode 100644 index 000000000..6322d404f --- /dev/null +++ b/crates/ide/test_data/highlight_doctest.html @@ -0,0 +1,102 @@ + + +
/// ```
+/// let _ = "early doctests should not go boom";
+/// ```
+struct Foo {
+    bar: bool,
+}
+
+impl Foo {
+    pub const bar: bool = true;
+
+    /// Constructs a new `Foo`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(unused_mut)]
+    /// let mut foo: Foo = Foo::new();
+    /// ```
+    pub const fn new() -> Foo {
+        Foo { bar: true }
+    }
+
+    /// `bar` method on `Foo`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use x::y;
+    ///
+    /// let foo = Foo::new();
+    ///
+    /// // calls bar on foo
+    /// assert!(foo.bar());
+    ///
+    /// let bar = foo.bar || Foo::bar;
+    ///
+    /// /* multi-line
+    ///        comment */
+    ///
+    /// let multi_line_string = "Foo
+    ///   bar
+    ///          ";
+    ///
+    /// ```
+    ///
+    /// ```rust,no_run
+    /// let foobar = Foo::new().bar();
+    /// ```
+    ///
+    /// ```sh
+    /// echo 1
+    /// ```
+    pub fn foo(&self) -> bool {
+        true
+    }
+}
+
+/// ```
+/// noop!(1);
+/// ```
+macro_rules! noop {
+    ($expr:expr) => {
+        $expr
+    }
+}
\ No newline at end of file diff --git a/crates/ide/test_data/highlight_extern_crate.html b/crates/ide/test_data/highlight_extern_crate.html new file mode 100644 index 000000000..800d894c7 --- /dev/null +++ b/crates/ide/test_data/highlight_extern_crate.html @@ -0,0 +1,40 @@ + + +
extern crate std;
+extern crate alloc as abc;
+
\ No newline at end of file diff --git a/crates/ide/test_data/highlight_injection.html b/crates/ide/test_data/highlight_injection.html new file mode 100644 index 000000000..18addd00d --- /dev/null +++ b/crates/ide/test_data/highlight_injection.html @@ -0,0 +1,48 @@ + + +
fn fixture(ra_fixture: &str) {}
+
+fn main() {
+    fixture(r#"
+        trait Foo {
+            fn foo() {
+                println!("2 + 2 = {}", 4);
+            }
+        }"#
+    );
+}
\ No newline at end of file diff --git a/crates/ide/test_data/highlight_strings.html b/crates/ide/test_data/highlight_strings.html new file mode 100644 index 000000000..1b681b2c6 --- /dev/null +++ b/crates/ide/test_data/highlight_strings.html @@ -0,0 +1,96 @@ + + +
macro_rules! println {
+    ($($arg:tt)*) => ({
+        $crate::io::_print($crate::format_args_nl!($($arg)*));
+    })
+}
+#[rustc_builtin_macro]
+macro_rules! format_args_nl {
+    ($fmt:expr) => {{ /* compiler built-in */ }};
+    ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }};
+}
+
+fn main() {
+    // from https://doc.rust-lang.org/std/fmt/index.html
+    println!("Hello");                 // => "Hello"
+    println!("Hello, {}!", "world");   // => "Hello, world!"
+    println!("The number is {}", 1);   // => "The number is 1"
+    println!("{:?}", (3, 4));          // => "(3, 4)"
+    println!("{value}", value=4);      // => "4"
+    println!("{} {}", 1, 2);           // => "1 2"
+    println!("{:04}", 42);             // => "0042" with leading zerosV
+    println!("{1} {} {0} {}", 1, 2);   // => "2 1 1 2"
+    println!("{argument}", argument = "test");   // => "test"
+    println!("{name} {}", 1, name = 2);          // => "2 1"
+    println!("{a} {c} {b}", a="a", b='b', c=3);  // => "a 3 b"
+    println!("{{{}}}", 2);                       // => "{2}"
+    println!("Hello {:5}!", "x");
+    println!("Hello {:1$}!", "x", 5);
+    println!("Hello {1:0$}!", 5, "x");
+    println!("Hello {:width$}!", "x", width = 5);
+    println!("Hello {:<5}!", "x");
+    println!("Hello {:-<5}!", "x");
+    println!("Hello {:^5}!", "x");
+    println!("Hello {:>5}!", "x");
+    println!("Hello {:+}!", 5);
+    println!("{:#x}!", 27);
+    println!("Hello {:05}!", 5);
+    println!("Hello {:05}!", -5);
+    println!("{:#010x}!", 27);
+    println!("Hello {0} is {1:.5}", "x", 0.01);
+    println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+    println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+    println!("Hello {} is {:.*}",    "x", 5, 0.01);
+    println!("Hello {} is {2:.*}",   "x", 5, 0.01);
+    println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+    println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+    println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+    println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+    println!("Hello {{}}");
+    println!("{{ Hello");
+
+    println!(r"Hello, {}!", "world");
+
+    // escape sequences
+    println!("Hello\nWorld");
+    println!("\u{48}\x65\x6C\x6C\x6F World");
+
+    println!("{\x41}", A = 92);
+    println!("{ничоси}", ничоси = 92);
+}
\ No newline at end of file diff --git a/crates/ide/test_data/highlight_unsafe.html b/crates/ide/test_data/highlight_unsafe.html new file mode 100644 index 000000000..552fea668 --- /dev/null +++ b/crates/ide/test_data/highlight_unsafe.html @@ -0,0 +1,99 @@ + + +
unsafe fn unsafe_fn() {}
+
+union Union {
+    a: u32,
+    b: f32,
+}
+
+struct HasUnsafeFn;
+
+impl HasUnsafeFn {
+    unsafe fn unsafe_method(&self) {}
+}
+
+struct TypeForStaticMut {
+    a: u8
+}
+
+static mut global_mut: TypeForStaticMut = TypeForStaticMut { a: 0 };
+
+#[repr(packed)]
+struct Packed {
+    a: u16,
+}
+
+trait DoTheAutoref {
+    fn calls_autoref(&self);
+}
+
+impl DoTheAutoref for u16 {
+    fn calls_autoref(&self) {}
+}
+
+fn main() {
+    let x = &5 as *const _ as *const usize;
+    let u = Union { b: 0 };
+    unsafe {
+        // unsafe fn and method calls
+        unsafe_fn();
+        let b = u.b;
+        match u {
+            Union { b: 0 } => (),
+            Union { a } => (),
+        }
+        HasUnsafeFn.unsafe_method();
+
+        // unsafe deref
+        let y = *x;
+
+        // unsafe access to a static mut
+        let a = global_mut.a;
+
+        // unsafe ref of packed fields
+        let packed = Packed { a: 0 };
+        let a = &packed.a;
+        let ref a = packed.a;
+        let Packed { ref a } = packed;
+        let Packed { a: ref _a } = packed;
+
+        // unsafe auto ref of packed field
+        packed.a.calls_autoref();
+    }
+}
\ No newline at end of file diff --git a/crates/ide/test_data/highlighting.html b/crates/ide/test_data/highlighting.html new file mode 100644 index 000000000..8e0160eee --- /dev/null +++ b/crates/ide/test_data/highlighting.html @@ -0,0 +1,131 @@ + + +
use inner::{self as inner_mod};
+mod inner {}
+
+#[derive(Clone, Debug)]
+struct Foo {
+    pub x: i32,
+    pub y: i32,
+}
+
+trait Bar {
+    fn bar(&self) -> i32;
+}
+
+impl Bar for Foo {
+    fn bar(&self) -> i32 {
+        self.x
+    }
+}
+
+impl Foo {
+    fn baz(mut self) -> i32 {
+        self.x
+    }
+
+    fn qux(&mut self) {
+        self.x = 0;
+    }
+}
+
+static mut STATIC_MUT: i32 = 0;
+
+fn foo<'a, T>() -> T {
+    foo::<'a, i32>()
+}
+
+macro_rules! def_fn {
+    ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+    fn bar() -> u32 {
+        100
+    }
+}
+
+macro_rules! noop {
+    ($expr:expr) => {
+        $expr
+    }
+}
+
+// comment
+fn main() {
+    println!("Hello, {}!", 92);
+
+    let mut vec = Vec::new();
+    if true {
+        let x = 92;
+        vec.push(Foo { x, y: 1 });
+    }
+    unsafe {
+        vec.set_len(0);
+        STATIC_MUT = 1;
+    }
+
+    for e in vec {
+        // Do nothing
+    }
+
+    noop!(noop!(1));
+
+    let mut x = 42;
+    let y = &mut x;
+    let z = &y;
+
+    let Foo { x: z, y } = Foo { x: z, y };
+
+    y;
+}
+
+enum Option<T> {
+    Some(T),
+    None,
+}
+use Option::*;
+
+impl<T> Option<T> {
+    fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
+        match other {
+            None => unimplemented!(),
+            Nope => Nope,
+        }
+    }
+}
\ No newline at end of file diff --git a/crates/ide/test_data/rainbow_highlighting.html b/crates/ide/test_data/rainbow_highlighting.html new file mode 100644 index 000000000..401e87a73 --- /dev/null +++ b/crates/ide/test_data/rainbow_highlighting.html @@ -0,0 +1,49 @@ + + +
fn main() {
+    let hello = "hello";
+    let x = hello.to_string();
+    let y = hello.to_string();
+
+    let x = "other color please!";
+    let y = x.to_string();
+}
+
+fn bar() {
+    let mut hello = "hello";
+}
\ No newline at end of file -- cgit v1.2.3