From 757e593b253b4df7e6fc8bf15a4d4f34c9d484c5 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 27 Nov 2019 21:32:33 +0300
Subject: rename ra_ide_api -> ra_ide

---
 crates/ra_ide/src/assists.rs | 28 +
 crates/ra_ide/src/call_info.rs | 592 ++++++++++++++
 crates/ra_ide/src/change.rs | 354 +++++++++
 crates/ra_ide/src/completion.rs | 77 ++
 crates/ra_ide/src/completion/complete_dot.rs | 456 +++++++++++
 crates/ra_ide/src/completion/complete_fn_param.rs | 136 ++++
 crates/ra_ide/src/completion/complete_keyword.rs | 781 ++++++++++++++++++
 .../completion/complete_macro_in_item_position.rs | 143 ++++
 crates/ra_ide/src/completion/complete_path.rs | 785 ++++++++++++++++++
 crates/ra_ide/src/completion/complete_pattern.rs | 89 +++
 crates/ra_ide/src/completion/complete_postfix.rs | 282 +++++++
 .../src/completion/complete_record_literal.rs | 159 ++++
 .../src/completion/complete_record_pattern.rs | 93 +++
 crates/ra_ide/src/completion/complete_scope.rs | 876 +++++++++++++++++++++
 crates/ra_ide/src/completion/complete_snippet.rs | 120 +++
 crates/ra_ide/src/completion/completion_context.rs | 274 +++++++
 crates/ra_ide/src/completion/completion_item.rs | 322 ++++++++
 crates/ra_ide/src/completion/presentation.rs | 676 ++++++++++++++++
 crates/ra_ide/src/db.rs | 144 ++++
 crates/ra_ide/src/diagnostics.rs | 652 +++++++++++++++
 crates/ra_ide/src/display.rs | 84 ++
 crates/ra_ide/src/display/function_signature.rs | 215 +++++
 crates/ra_ide/src/display/navigation_target.rs | 411 ++++++++++
 crates/ra_ide/src/display/short_label.rs | 97 +++
 crates/ra_ide/src/display/structure.rs | 401 ++++++++++
 crates/ra_ide/src/expand.rs | 63 ++
 crates/ra_ide/src/expand_macro.rs | 295 +++++++
 crates/ra_ide/src/extend_selection.rs | 452 +++++++++++
 crates/ra_ide/src/feature_flags.rs | 70 ++
 crates/ra_ide/src/folding_ranges.rs | 378 +++++++++
 crates/ra_ide/src/goto_definition.rs | 696 ++++++++++++++++
 crates/ra_ide/src/goto_type_definition.rs | 105 +++
 crates/ra_ide/src/hover.rs | 730 +++++++++++++++++
 crates/ra_ide/src/impls.rs | 206 +++++
 crates/ra_ide/src/inlay_hints.rs | 543 +++++++++++++
 crates/ra_ide/src/join_lines.rs | 611 ++++++++++++++
 crates/ra_ide/src/lib.rs | 489 ++++++++++++
 crates/ra_ide/src/line_index.rs | 283 +++++++
 crates/ra_ide/src/line_index_utils.rs | 331 ++++++++
 crates/ra_ide/src/marks.rs | 13 +
 crates/ra_ide/src/matching_brace.rs | 43 +
 crates/ra_ide/src/mock_analysis.rs | 149 ++++
 crates/ra_ide/src/parent_module.rs | 104 +++
 crates/ra_ide/src/references.rs | 389 +++++++++
 crates/ra_ide/src/references/classify.rs | 186 +++++
 crates/ra_ide/src/references/name_definition.rs | 83 ++
 crates/ra_ide/src/references/rename.rs | 328 ++++++++
 crates/ra_ide/src/references/search_scope.rs | 145 ++++
 crates/ra_ide/src/runnables.rs | 242 ++++++
 crates/ra_ide/src/snapshots/highlighting.html | 48 ++
 .../ra_ide/src/snapshots/rainbow_highlighting.html | 33 +
 crates/ra_ide/src/source_change.rs | 119 +++
 crates/ra_ide/src/status.rs | 136 ++++
 crates/ra_ide/src/symbol_index.rs | 405 ++++++++++
 crates/ra_ide/src/syntax_highlighting.rs | 342 ++++++++
 crates/ra_ide/src/syntax_tree.rs | 359 +++++++++
 crates/ra_ide/src/test_utils.rs | 21 +
 crates/ra_ide/src/typing.rs | 490 ++++++++++++
 crates/ra_ide/src/wasm_shims.rs | 19 +
 59 files changed, 17153 insertions(+)
 create mode 100644 crates/ra_ide/src/assists.rs
 create mode 100644 crates/ra_ide/src/call_info.rs
 create mode 100644 crates/ra_ide/src/change.rs
 create mode 100644 crates/ra_ide/src/completion.rs
 create mode 100644 crates/ra_ide/src/completion/complete_dot.rs
 create mode 100644
crates/ra_ide/src/completion/complete_fn_param.rs create mode 100644 crates/ra_ide/src/completion/complete_keyword.rs create mode 100644 crates/ra_ide/src/completion/complete_macro_in_item_position.rs create mode 100644 crates/ra_ide/src/completion/complete_path.rs create mode 100644 crates/ra_ide/src/completion/complete_pattern.rs create mode 100644 crates/ra_ide/src/completion/complete_postfix.rs create mode 100644 crates/ra_ide/src/completion/complete_record_literal.rs create mode 100644 crates/ra_ide/src/completion/complete_record_pattern.rs create mode 100644 crates/ra_ide/src/completion/complete_scope.rs create mode 100644 crates/ra_ide/src/completion/complete_snippet.rs create mode 100644 crates/ra_ide/src/completion/completion_context.rs create mode 100644 crates/ra_ide/src/completion/completion_item.rs create mode 100644 crates/ra_ide/src/completion/presentation.rs create mode 100644 crates/ra_ide/src/db.rs create mode 100644 crates/ra_ide/src/diagnostics.rs create mode 100644 crates/ra_ide/src/display.rs create mode 100644 crates/ra_ide/src/display/function_signature.rs create mode 100644 crates/ra_ide/src/display/navigation_target.rs create mode 100644 crates/ra_ide/src/display/short_label.rs create mode 100644 crates/ra_ide/src/display/structure.rs create mode 100644 crates/ra_ide/src/expand.rs create mode 100644 crates/ra_ide/src/expand_macro.rs create mode 100644 crates/ra_ide/src/extend_selection.rs create mode 100644 crates/ra_ide/src/feature_flags.rs create mode 100644 crates/ra_ide/src/folding_ranges.rs create mode 100644 crates/ra_ide/src/goto_definition.rs create mode 100644 crates/ra_ide/src/goto_type_definition.rs create mode 100644 crates/ra_ide/src/hover.rs create mode 100644 crates/ra_ide/src/impls.rs create mode 100644 crates/ra_ide/src/inlay_hints.rs create mode 100644 crates/ra_ide/src/join_lines.rs create mode 100644 crates/ra_ide/src/lib.rs create mode 100644 crates/ra_ide/src/line_index.rs create mode 100644 crates/ra_ide/src/line_index_utils.rs create mode 100644 crates/ra_ide/src/marks.rs create mode 100644 crates/ra_ide/src/matching_brace.rs create mode 100644 crates/ra_ide/src/mock_analysis.rs create mode 100644 crates/ra_ide/src/parent_module.rs create mode 100644 crates/ra_ide/src/references.rs create mode 100644 crates/ra_ide/src/references/classify.rs create mode 100644 crates/ra_ide/src/references/name_definition.rs create mode 100644 crates/ra_ide/src/references/rename.rs create mode 100644 crates/ra_ide/src/references/search_scope.rs create mode 100644 crates/ra_ide/src/runnables.rs create mode 100644 crates/ra_ide/src/snapshots/highlighting.html create mode 100644 crates/ra_ide/src/snapshots/rainbow_highlighting.html create mode 100644 crates/ra_ide/src/source_change.rs create mode 100644 crates/ra_ide/src/status.rs create mode 100644 crates/ra_ide/src/symbol_index.rs create mode 100644 crates/ra_ide/src/syntax_highlighting.rs create mode 100644 crates/ra_ide/src/syntax_tree.rs create mode 100644 crates/ra_ide/src/test_utils.rs create mode 100644 crates/ra_ide/src/typing.rs create mode 100644 crates/ra_ide/src/wasm_shims.rs (limited to 'crates/ra_ide/src') diff --git a/crates/ra_ide/src/assists.rs b/crates/ra_ide/src/assists.rs new file mode 100644 index 000000000..e00589733 --- /dev/null +++ b/crates/ra_ide/src/assists.rs @@ -0,0 +1,28 @@ +//! 
FIXME: write short doc here + +use ra_db::{FilePosition, FileRange}; + +use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; + +pub use ra_assists::AssistId; + +#[derive(Debug)] +pub struct Assist { + pub id: AssistId, + pub change: SourceChange, +} + +pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec { + ra_assists::assists(db, frange) + .into_iter() + .map(|(label, action)| { + let file_id = frange.file_id; + let file_edit = SourceFileEdit { file_id, edit: action.edit }; + let id = label.id; + let change = SourceChange::source_file_edit(label.label, file_edit).with_cursor_opt( + action.cursor_position.map(|offset| FilePosition { offset, file_id }), + ); + Assist { id, change } + }) + .collect() +} diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs new file mode 100644 index 000000000..d559dc4d0 --- /dev/null +++ b/crates/ra_ide/src/call_info.rs @@ -0,0 +1,592 @@ +//! FIXME: write short doc here + +use ra_db::SourceDatabase; +use ra_syntax::{ + algo::ancestors_at_offset, + ast::{self, ArgListOwner}, + match_ast, AstNode, SyntaxNode, TextUnit, +}; +use test_utils::tested_by; + +use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature}; + +/// Computes parameter information for the given call expression. +pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { + let parse = db.parse(position.file_id); + let syntax = parse.tree().syntax().clone(); + + // Find the calling expression and it's NameRef + let calling_node = FnCallNode::with_node(&syntax, position.offset)?; + let name_ref = calling_node.name_ref()?; + let name_ref = hir::Source::new(position.file_id.into(), name_ref.syntax()); + + let analyzer = hir::SourceAnalyzer::new(db, name_ref, None); + let (mut call_info, has_self) = match &calling_node { + FnCallNode::CallExpr(expr) => { + //FIXME: Type::as_callable is broken + let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; + match callable_def { + hir::CallableDef::FunctionId(it) => { + let fn_def = it.into(); + (CallInfo::with_fn(db, fn_def), fn_def.has_self_param(db)) + } + hir::CallableDef::StructId(it) => (CallInfo::with_struct(db, it.into())?, false), + hir::CallableDef::EnumVariantId(it) => { + (CallInfo::with_enum_variant(db, it.into())?, false) + } + } + } + FnCallNode::MethodCallExpr(expr) => { + let function = analyzer.resolve_method_call(&expr)?; + (CallInfo::with_fn(db, function), function.has_self_param(db)) + } + FnCallNode::MacroCallExpr(expr) => { + let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; + (CallInfo::with_macro(db, macro_def)?, false) + } + }; + + // If we have a calling expression let's find which argument we are on + let num_params = call_info.parameters().len(); + + if num_params == 1 { + if !has_self { + call_info.active_parameter = Some(0); + } + } else if num_params > 1 { + // Count how many parameters into the call we are. 
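+        // For example, in `foo(3, <|>)` the first argument ends before the
+        // cursor, so the 0-based active parameter is 1, while in `foo(<|>3, )`
+        // it is 0; for method calls the implicit `self` shifts the index by one.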
+ if let Some(arg_list) = calling_node.arg_list() { + // Number of arguments specified at the call site + let num_args_at_callsite = arg_list.args().count(); + + let arg_list_range = arg_list.syntax().text_range(); + if !arg_list_range.contains_inclusive(position.offset) { + tested_by!(call_info_bad_offset); + return None; + } + + let mut param = std::cmp::min( + num_args_at_callsite, + arg_list + .args() + .take_while(|arg| arg.syntax().text_range().end() < position.offset) + .count(), + ); + + // If we are in a method account for `self` + if has_self { + param += 1; + } + + call_info.active_parameter = Some(param); + } + } + + Some(call_info) +} + +#[derive(Debug)] +enum FnCallNode { + CallExpr(ast::CallExpr), + MethodCallExpr(ast::MethodCallExpr), + MacroCallExpr(ast::MacroCall), +} + +impl FnCallNode { + fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option { + ancestors_at_offset(syntax, offset).find_map(|node| { + match_ast! { + match node { + ast::CallExpr(it) => { Some(FnCallNode::CallExpr(it)) }, + ast::MethodCallExpr(it) => { Some(FnCallNode::MethodCallExpr(it)) }, + ast::MacroCall(it) => { Some(FnCallNode::MacroCallExpr(it)) }, + _ => { None }, + } + } + }) + } + + fn name_ref(&self) -> Option { + match self { + FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? { + ast::Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, + _ => return None, + }), + + FnCallNode::MethodCallExpr(call_expr) => { + call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0) + } + + FnCallNode::MacroCallExpr(call_expr) => call_expr.path()?.segment()?.name_ref(), + } + } + + fn arg_list(&self) -> Option { + match self { + FnCallNode::CallExpr(expr) => expr.arg_list(), + FnCallNode::MethodCallExpr(expr) => expr.arg_list(), + FnCallNode::MacroCallExpr(_) => None, + } + } +} + +impl CallInfo { + fn with_fn(db: &RootDatabase, function: hir::Function) -> Self { + let signature = FunctionSignature::from_hir(db, function); + + CallInfo { signature, active_parameter: None } + } + + fn with_struct(db: &RootDatabase, st: hir::Struct) -> Option { + let signature = FunctionSignature::from_struct(db, st)?; + + Some(CallInfo { signature, active_parameter: None }) + } + + fn with_enum_variant(db: &RootDatabase, variant: hir::EnumVariant) -> Option { + let signature = FunctionSignature::from_enum_variant(db, variant)?; + + Some(CallInfo { signature, active_parameter: None }) + } + + fn with_macro(db: &RootDatabase, macro_def: hir::MacroDef) -> Option { + let signature = FunctionSignature::from_macro(db, macro_def)?; + + Some(CallInfo { signature, active_parameter: None }) + } + + fn parameters(&self) -> &[String] { + &self.signature.parameters + } +} + +#[cfg(test)] +mod tests { + use test_utils::covers; + + use crate::mock_analysis::single_file_with_position; + + use super::*; + + // These are only used when testing + impl CallInfo { + fn doc(&self) -> Option { + self.signature.doc.clone() + } + + fn label(&self) -> String { + self.signature.to_string() + } + } + + fn call_info(text: &str) -> CallInfo { + let (analysis, position) = single_file_with_position(text); + analysis.call_info(position).unwrap().unwrap() + } + + #[test] + fn test_fn_signature_two_args_firstx() { + let info = call_info( + r#"fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>3, ); }"#, + ); + + assert_eq!(info.parameters(), ["x: u32", "y: u32"]); + assert_eq!(info.active_parameter, Some(0)); + } + + #[test] + fn test_fn_signature_two_args_second() { + let info = call_info( + r#"fn foo(x: 
u32, y: u32) -> u32 {x + y} +fn bar() { foo(3, <|>); }"#, + ); + + assert_eq!(info.parameters(), ["x: u32", "y: u32"]); + assert_eq!(info.active_parameter, Some(1)); + } + + #[test] + fn test_fn_signature_two_args_empty() { + let info = call_info( + r#"fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>); }"#, + ); + + assert_eq!(info.parameters(), ["x: u32", "y: u32"]); + assert_eq!(info.active_parameter, Some(0)); + } + + #[test] + fn test_fn_signature_two_args_first_generics() { + let info = call_info( + r#"fn foo(x: T, y: U) -> u32 where T: Copy + Display, U: Debug {x + y} +fn bar() { foo(<|>3, ); }"#, + ); + + assert_eq!(info.parameters(), ["x: T", "y: U"]); + assert_eq!( + info.label(), + r#" +fn foo(x: T, y: U) -> u32 +where T: Copy + Display, + U: Debug + "# + .trim() + ); + assert_eq!(info.active_parameter, Some(0)); + } + + #[test] + fn test_fn_signature_no_params() { + let info = call_info( + r#"fn foo() -> T where T: Copy + Display {} +fn bar() { foo(<|>); }"#, + ); + + assert!(info.parameters().is_empty()); + assert_eq!( + info.label(), + r#" +fn foo() -> T +where T: Copy + Display + "# + .trim() + ); + assert!(info.active_parameter.is_none()); + } + + #[test] + fn test_fn_signature_for_impl() { + let info = call_info( + r#"struct F; impl F { pub fn new() { F{}} } +fn bar() {let _ : F = F::new(<|>);}"#, + ); + + assert!(info.parameters().is_empty()); + assert_eq!(info.active_parameter, None); + } + + #[test] + fn test_fn_signature_for_method_self() { + let info = call_info( + r#"struct F; +impl F { + pub fn new() -> F{ + F{} + } + + pub fn do_it(&self) {} +} + +fn bar() { + let f : F = F::new(); + f.do_it(<|>); +}"#, + ); + + assert_eq!(info.parameters(), ["&self"]); + assert_eq!(info.active_parameter, None); + } + + #[test] + fn test_fn_signature_for_method_with_arg() { + let info = call_info( + r#"struct F; +impl F { + pub fn new() -> F{ + F{} + } + + pub fn do_it(&self, x: i32) {} +} + +fn bar() { + let f : F = F::new(); + f.do_it(<|>); +}"#, + ); + + assert_eq!(info.parameters(), ["&self", "x: i32"]); + assert_eq!(info.active_parameter, Some(1)); + } + + #[test] + fn test_fn_signature_with_docs_simple() { + let info = call_info( + r#" +/// test +// non-doc-comment +fn foo(j: u32) -> u32 { + j +} + +fn bar() { + let _ = foo(<|>); +} +"#, + ); + + assert_eq!(info.parameters(), ["j: u32"]); + assert_eq!(info.active_parameter, Some(0)); + assert_eq!(info.label(), "fn foo(j: u32) -> u32"); + assert_eq!(info.doc().map(|it| it.into()), Some("test".to_string())); + } + + #[test] + fn test_fn_signature_with_docs() { + let info = call_info( + r#" +/// Adds one to the number given. +/// +/// # Examples +/// +/// ``` +/// let five = 5; +/// +/// assert_eq!(6, my_crate::add_one(5)); +/// ``` +pub fn add_one(x: i32) -> i32 { + x + 1 +} + +pub fn do() { + add_one(<|> +}"#, + ); + + assert_eq!(info.parameters(), ["x: i32"]); + assert_eq!(info.active_parameter, Some(0)); + assert_eq!(info.label(), "pub fn add_one(x: i32) -> i32"); + assert_eq!( + info.doc().map(|it| it.into()), + Some( + r#"Adds one to the number given. + +# Examples + +``` +let five = 5; + +assert_eq!(6, my_crate::add_one(5)); +```"# + .to_string() + ) + ); + } + + #[test] + fn test_fn_signature_with_docs_impl() { + let info = call_info( + r#" +struct addr; +impl addr { + /// Adds one to the number given. 
+ /// + /// # Examples + /// + /// ``` + /// let five = 5; + /// + /// assert_eq!(6, my_crate::add_one(5)); + /// ``` + pub fn add_one(x: i32) -> i32 { + x + 1 + } +} + +pub fn do_it() { + addr {}; + addr::add_one(<|>); +}"#, + ); + + assert_eq!(info.parameters(), ["x: i32"]); + assert_eq!(info.active_parameter, Some(0)); + assert_eq!(info.label(), "pub fn add_one(x: i32) -> i32"); + assert_eq!( + info.doc().map(|it| it.into()), + Some( + r#"Adds one to the number given. + +# Examples + +``` +let five = 5; + +assert_eq!(6, my_crate::add_one(5)); +```"# + .to_string() + ) + ); + } + + #[test] + fn test_fn_signature_with_docs_from_actix() { + let info = call_info( + r#" +struct WriteHandler; + +impl WriteHandler { + /// Method is called when writer emits error. + /// + /// If this method returns `ErrorAction::Continue` writer processing + /// continues otherwise stream processing stops. + fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { + Running::Stop + } + + /// Method is called when writer finishes. + /// + /// By default this method stops actor's `Context`. + fn finished(&mut self, ctx: &mut Self::Context) { + ctx.stop() + } +} + +pub fn foo(mut r: WriteHandler<()>) { + r.finished(<|>); +} + +"#, + ); + + assert_eq!(info.label(), "fn finished(&mut self, ctx: &mut Self::Context)".to_string()); + assert_eq!(info.parameters(), ["&mut self", "ctx: &mut Self::Context"]); + assert_eq!(info.active_parameter, Some(1)); + assert_eq!( + info.doc().map(|it| it.into()), + Some( + r#"Method is called when writer finishes. + +By default this method stops actor's `Context`."# + .to_string() + ) + ); + } + + #[test] + fn call_info_bad_offset() { + covers!(call_info_bad_offset); + let (analysis, position) = single_file_with_position( + r#"fn foo(x: u32, y: u32) -> u32 {x + y} + fn bar() { foo <|> (3, ); }"#, + ); + let call_info = analysis.call_info(position).unwrap(); + assert!(call_info.is_none()); + } + + #[test] + fn test_nested_method_in_lamba() { + let info = call_info( + r#"struct Foo; + +impl Foo { + fn bar(&self, _: u32) { } +} + +fn bar(_: u32) { } + +fn main() { + let foo = Foo; + std::thread::spawn(move || foo.bar(<|>)); +}"#, + ); + + assert_eq!(info.parameters(), ["&self", "_: u32"]); + assert_eq!(info.active_parameter, Some(1)); + assert_eq!(info.label(), "fn bar(&self, _: u32)"); + } + + #[test] + fn works_for_tuple_structs() { + let info = call_info( + r#" +/// A cool tuple struct +struct TS(u32, i32); +fn main() { + let s = TS(0, <|>); +}"#, + ); + + assert_eq!(info.label(), "struct TS(u32, i32) -> TS"); + assert_eq!(info.doc().map(|it| it.into()), Some("A cool tuple struct".to_string())); + assert_eq!(info.active_parameter, Some(1)); + } + + #[test] + #[should_panic] + fn cant_call_named_structs() { + let _ = call_info( + r#" +struct TS { x: u32, y: i32 } +fn main() { + let s = TS(<|>); +}"#, + ); + } + + #[test] + fn works_for_enum_variants() { + let info = call_info( + r#" +enum E { + /// A Variant + A(i32), + /// Another + B, + /// And C + C { a: i32, b: i32 } +} + +fn main() { + let a = E::A(<|>); +} + "#, + ); + + assert_eq!(info.label(), "E::A(0: i32)"); + assert_eq!(info.doc().map(|it| it.into()), Some("A Variant".to_string())); + assert_eq!(info.active_parameter, Some(0)); + } + + #[test] + #[should_panic] + fn cant_call_enum_records() { + let _ = call_info( + r#" +enum E { + /// A Variant + A(i32), + /// Another + B, + /// And C + C { a: i32, b: i32 } +} + +fn main() { + let a = E::C(<|>); +} + "#, + ); + } + + #[test] + fn fn_signature_for_macro() { + let 
info = call_info( + r#" +/// empty macro +macro_rules! foo { + () => {} +} + +fn f() { + foo!(<|>); +} + "#, + ); + + assert_eq!(info.label(), "foo!()"); + assert_eq!(info.doc().map(|it| it.into()), Some("empty macro".to_string())); + } +} diff --git a/crates/ra_ide/src/change.rs b/crates/ra_ide/src/change.rs new file mode 100644 index 000000000..4a76d1dd8 --- /dev/null +++ b/crates/ra_ide/src/change.rs @@ -0,0 +1,354 @@ +//! FIXME: write short doc here + +use std::{fmt, sync::Arc, time}; + +use ra_db::{ + salsa::{Database, Durability, SweepStrategy}, + CrateGraph, CrateId, FileId, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot, + SourceRootId, +}; +use ra_prof::{memory_usage, profile, Bytes}; +use ra_syntax::SourceFile; +#[cfg(not(feature = "wasm"))] +use rayon::prelude::*; +use rustc_hash::FxHashMap; + +use crate::{ + db::{DebugData, RootDatabase}, + symbol_index::{SymbolIndex, SymbolsDatabase}, +}; + +#[derive(Default)] +pub struct AnalysisChange { + new_roots: Vec<(SourceRootId, bool)>, + roots_changed: FxHashMap, + files_changed: Vec<(FileId, Arc)>, + libraries_added: Vec, + crate_graph: Option, + debug_data: DebugData, +} + +impl fmt::Debug for AnalysisChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + let mut d = fmt.debug_struct("AnalysisChange"); + if !self.new_roots.is_empty() { + d.field("new_roots", &self.new_roots); + } + if !self.roots_changed.is_empty() { + d.field("roots_changed", &self.roots_changed); + } + if !self.files_changed.is_empty() { + d.field("files_changed", &self.files_changed.len()); + } + if !self.libraries_added.is_empty() { + d.field("libraries_added", &self.libraries_added.len()); + } + if !self.crate_graph.is_none() { + d.field("crate_graph", &self.crate_graph); + } + d.finish() + } +} + +impl AnalysisChange { + pub fn new() -> AnalysisChange { + AnalysisChange::default() + } + + pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { + self.new_roots.push((root_id, is_local)); + } + + pub fn add_file( + &mut self, + root_id: SourceRootId, + file_id: FileId, + path: RelativePathBuf, + text: Arc, + ) { + let file = AddFile { file_id, path, text }; + self.roots_changed.entry(root_id).or_default().added.push(file); + } + + pub fn change_file(&mut self, file_id: FileId, new_text: Arc) { + self.files_changed.push((file_id, new_text)) + } + + pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { + let file = RemoveFile { file_id, path }; + self.roots_changed.entry(root_id).or_default().removed.push(file); + } + + pub fn add_library(&mut self, data: LibraryData) { + self.libraries_added.push(data) + } + + pub fn set_crate_graph(&mut self, graph: CrateGraph) { + self.crate_graph = Some(graph); + } + + pub fn set_debug_crate_name(&mut self, crate_id: CrateId, name: String) { + self.debug_data.crate_names.insert(crate_id, name); + } + + pub fn set_debug_root_path(&mut self, source_root_id: SourceRootId, path: String) { + self.debug_data.root_paths.insert(source_root_id, path); + } +} + +#[derive(Debug)] +struct AddFile { + file_id: FileId, + path: RelativePathBuf, + text: Arc, +} + +#[derive(Debug)] +struct RemoveFile { + file_id: FileId, + path: RelativePathBuf, +} + +#[derive(Default)] +struct RootChange { + added: Vec, + removed: Vec, +} + +impl fmt::Debug for RootChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("AnalysisChange") + .field("added", &self.added.len()) + .field("removed", &self.removed.len()) + .finish() + } +} + +pub struct 
LibraryData { + root_id: SourceRootId, + root_change: RootChange, + symbol_index: SymbolIndex, +} + +impl fmt::Debug for LibraryData { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("LibraryData") + .field("root_id", &self.root_id) + .field("root_change", &self.root_change) + .field("n_symbols", &self.symbol_index.len()) + .finish() + } +} + +impl LibraryData { + pub fn prepare( + root_id: SourceRootId, + files: Vec<(FileId, RelativePathBuf, Arc)>, + ) -> LibraryData { + #[cfg(not(feature = "wasm"))] + let iter = files.par_iter(); + #[cfg(feature = "wasm")] + let iter = files.iter(); + + let symbol_index = SymbolIndex::for_files(iter.map(|(file_id, _, text)| { + let parse = SourceFile::parse(text); + (*file_id, parse) + })); + let mut root_change = RootChange::default(); + root_change.added = files + .into_iter() + .map(|(file_id, path, text)| AddFile { file_id, path, text }) + .collect(); + LibraryData { root_id, root_change, symbol_index } + } +} + +const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); + +impl RootDatabase { + pub(crate) fn apply_change(&mut self, change: AnalysisChange) { + let _p = profile("RootDatabase::apply_change"); + log::info!("apply_change {:?}", change); + { + let _p = profile("RootDatabase::apply_change/cancellation"); + self.salsa_runtime_mut().synthetic_write(Durability::LOW); + } + if !change.new_roots.is_empty() { + let mut local_roots = Vec::clone(&self.local_roots()); + for (root_id, is_local) in change.new_roots { + let root = if is_local { SourceRoot::new() } else { SourceRoot::new_library() }; + let durability = durability(&root); + self.set_source_root_with_durability(root_id, Arc::new(root), durability); + if is_local { + local_roots.push(root_id); + } + } + self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); + } + + for (root_id, root_change) in change.roots_changed { + self.apply_root_change(root_id, root_change); + } + for (file_id, text) in change.files_changed { + let source_root_id = self.file_source_root(file_id); + let source_root = self.source_root(source_root_id); + let durability = durability(&source_root); + self.set_file_text_with_durability(file_id, text, durability) + } + if !change.libraries_added.is_empty() { + let mut libraries = Vec::clone(&self.library_roots()); + for library in change.libraries_added { + libraries.push(library.root_id); + self.set_source_root_with_durability( + library.root_id, + Default::default(), + Durability::HIGH, + ); + self.set_library_symbols_with_durability( + library.root_id, + Arc::new(library.symbol_index), + Durability::HIGH, + ); + self.apply_root_change(library.root_id, library.root_change); + } + self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH); + } + if let Some(crate_graph) = change.crate_graph { + self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) + } + + Arc::make_mut(&mut self.debug_data).merge(change.debug_data) + } + + fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { + let mut source_root = SourceRoot::clone(&self.source_root(root_id)); + let durability = durability(&source_root); + for add_file in root_change.added { + self.set_file_text_with_durability(add_file.file_id, add_file.text, durability); + self.set_file_relative_path_with_durability( + add_file.file_id, + add_file.path.clone(), + durability, + ); + self.set_file_source_root_with_durability(add_file.file_id, root_id, durability); + source_root.insert_file(add_file.path, 
add_file.file_id); + } + for remove_file in root_change.removed { + self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability); + source_root.remove_file(&remove_file.path); + } + self.set_source_root_with_durability(root_id, Arc::new(source_root), durability); + } + + pub(crate) fn maybe_collect_garbage(&mut self) { + if cfg!(feature = "wasm") { + return; + } + + if self.last_gc_check.elapsed() > GC_COOLDOWN { + self.last_gc_check = crate::wasm_shims::Instant::now(); + } + } + + pub(crate) fn collect_garbage(&mut self) { + if cfg!(feature = "wasm") { + return; + } + + let _p = profile("RootDatabase::collect_garbage"); + self.last_gc = crate::wasm_shims::Instant::now(); + + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); + + self.query(ra_db::ParseQuery).sweep(sweep); + self.query(hir::db::ParseMacroQuery).sweep(sweep); + + // Macros do take significant space, but less then the syntax trees + // self.query(hir::db::MacroDefQuery).sweep(sweep); + // self.query(hir::db::MacroArgQuery).sweep(sweep); + // self.query(hir::db::MacroExpandQuery).sweep(sweep); + + self.query(hir::db::AstIdMapQuery).sweep(sweep); + + self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep); + self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep); + + self.query(hir::db::ExprScopesQuery).sweep(sweep); + self.query(hir::db::InferQuery).sweep(sweep); + self.query(hir::db::BodyQuery).sweep(sweep); + } + + pub(crate) fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { + let mut acc: Vec<(String, Bytes)> = vec![]; + let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); + macro_rules! sweep_each_query { + ($($q:path)*) => {$( + let before = memory_usage().allocated; + self.query($q).sweep(sweep); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?}", q); + acc.push((name, before - after)); + + let before = memory_usage().allocated; + self.query($q).sweep(sweep.discard_everything()); + let after = memory_usage().allocated; + let q: $q = Default::default(); + let name = format!("{:?} (deps)", q); + acc.push((name, before - after)); + )*} + } + sweep_each_query![ + ra_db::ParseQuery + ra_db::SourceRootCratesQuery + hir::db::AstIdMapQuery + hir::db::ParseMacroQuery + hir::db::MacroDefQuery + hir::db::MacroArgQuery + hir::db::MacroExpandQuery + hir::db::StructDataQuery + hir::db::EnumDataQuery + hir::db::TraitDataQuery + hir::db::RawItemsWithSourceMapQuery + hir::db::RawItemsQuery + hir::db::CrateDefMapQuery + hir::db::GenericParamsQuery + hir::db::FunctionDataQuery + hir::db::TypeAliasDataQuery + hir::db::ConstDataQuery + hir::db::StaticDataQuery + hir::db::ModuleLangItemsQuery + hir::db::CrateLangItemsQuery + hir::db::LangItemQuery + hir::db::DocumentationQuery + hir::db::ExprScopesQuery + hir::db::InferQuery + hir::db::TyQuery + hir::db::ValueTyQuery + hir::db::FieldTypesQuery + hir::db::CallableItemSignatureQuery + hir::db::GenericPredicatesQuery + hir::db::GenericDefaultsQuery + hir::db::BodyWithSourceMapQuery + hir::db::BodyQuery + hir::db::ImplsInCrateQuery + hir::db::ImplsForTraitQuery + hir::db::AssociatedTyDataQuery + hir::db::TraitDatumQuery + hir::db::StructDatumQuery + hir::db::ImplDatumQuery + hir::db::ImplDataQuery + hir::db::TraitSolveQuery + ]; + acc.sort_by_key(|it| std::cmp::Reverse(it.1)); + acc + } +} + +fn durability(source_root: &SourceRoot) -> Durability { + if source_root.is_library { + Durability::HIGH + } else { + Durability::LOW + } +} diff --git 
a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs new file mode 100644 index 000000000..abe1f36ce --- /dev/null +++ b/crates/ra_ide/src/completion.rs @@ -0,0 +1,77 @@ +//! FIXME: write short doc here + +mod completion_item; +mod completion_context; +mod presentation; + +mod complete_dot; +mod complete_record_literal; +mod complete_record_pattern; +mod complete_pattern; +mod complete_fn_param; +mod complete_keyword; +mod complete_snippet; +mod complete_path; +mod complete_scope; +mod complete_postfix; +mod complete_macro_in_item_position; + +use ra_db::SourceDatabase; + +#[cfg(test)] +use crate::completion::completion_item::do_completion; +use crate::{ + completion::{ + completion_context::CompletionContext, + completion_item::{CompletionKind, Completions}, + }, + db, FilePosition, +}; + +pub use crate::completion::completion_item::{ + CompletionItem, CompletionItemKind, InsertTextFormat, +}; + +/// Main entry point for completion. We run completion as a two-phase process. +/// +/// First, we look at the position and collect a so-called `CompletionContext. +/// This is a somewhat messy process, because, during completion, syntax tree is +/// incomplete and can look really weird. +/// +/// Once the context is collected, we run a series of completion routines which +/// look at the context and produce completion items. One subtlety about this +/// phase is that completion engine should not filter by the substring which is +/// already present, it should give all possible variants for the identifier at +/// the caret. In other words, for +/// +/// ```no-run +/// fn f() { +/// let foo = 92; +/// let _ = bar<|> +/// } +/// ``` +/// +/// `foo` *should* be present among the completion variants. Filtering by +/// identifier prefix/fuzzy match should be done higher in the stack, together +/// with ordering of completions (currently this is done by the client). +pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option { + let original_parse = db.parse(position.file_id); + let ctx = CompletionContext::new(db, &original_parse, position)?; + + let mut acc = Completions::default(); + + complete_fn_param::complete_fn_param(&mut acc, &ctx); + complete_keyword::complete_expr_keyword(&mut acc, &ctx); + complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); + complete_snippet::complete_expr_snippet(&mut acc, &ctx); + complete_snippet::complete_item_snippet(&mut acc, &ctx); + complete_path::complete_path(&mut acc, &ctx); + complete_scope::complete_scope(&mut acc, &ctx); + complete_dot::complete_dot(&mut acc, &ctx); + complete_record_literal::complete_record_literal(&mut acc, &ctx); + complete_record_pattern::complete_record_pattern(&mut acc, &ctx); + complete_pattern::complete_pattern(&mut acc, &ctx); + complete_postfix::complete_postfix(&mut acc, &ctx); + complete_macro_in_item_position::complete_macro_in_item_position(&mut acc, &ctx); + Some(acc) +} diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs new file mode 100644 index 000000000..b6fe48627 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_dot.rs @@ -0,0 +1,456 @@ +//! FIXME: write short doc here + +use hir::Type; + +use crate::completion::completion_item::CompletionKind; +use crate::{ + completion::{completion_context::CompletionContext, completion_item::Completions}, + CompletionItem, +}; +use rustc_hash::FxHashSet; + +/// Complete dot accesses, i.e. fields or methods (and .await syntax). 
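+///
+/// For example, given `a.<|>` with `a: A`, this offers `A`'s fields (following
+/// autoderef), its `&self` methods, and `await` when `A` implements `Future`.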
+pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { + let dot_receiver = match &ctx.dot_receiver { + Some(expr) => expr, + _ => return, + }; + + let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { + Some(ty) => ty, + _ => return, + }; + + if !ctx.is_call { + complete_fields(acc, ctx, &receiver_ty); + } + complete_methods(acc, ctx, &receiver_ty); + + // Suggest .await syntax for types that implement Future trait + if ctx.analyzer.impls_future(ctx.db, receiver_ty.into_ty()) { + CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await") + .detail("expr.await") + .insert_text("await") + .add_to(acc); + } +} + +fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { + for receiver in receiver.autoderef(ctx.db) { + for (field, ty) in receiver.fields(ctx.db) { + acc.add_field(ctx, field, &ty); + } + for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() { + acc.add_tuple_field(ctx, i, &ty); + } + } +} + +fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { + let mut seen_methods = FxHashSet::default(); + ctx.analyzer.iterate_method_candidates(ctx.db, receiver, None, |_ty, func| { + if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) { + acc.add_function(ctx, func); + } + None::<()> + }); +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_ref_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn test_struct_field_completion() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A { the_field: u32 } + fn foo(a: A) { + a.<|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [94; 94), + delete: [94; 94), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } + + #[test] + fn test_struct_field_completion_self() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A { + /// This is the_field + the_field: (u32,) + } + impl A { + fn foo(self) { + self.<|> + } + } + ", + ), + @r###" + [ + CompletionItem { + label: "foo()", + source_range: [187; 187), + delete: [187; 187), + insert: "foo()$0", + kind: Method, + lookup: "foo", + detail: "fn foo(self)", + }, + CompletionItem { + label: "the_field", + source_range: [187; 187), + delete: [187; 187), + insert: "the_field", + kind: Field, + detail: "(u32,)", + documentation: Documentation( + "This is the_field", + ), + }, + ] + "### + ); + } + + #[test] + fn test_struct_field_completion_autoderef() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A { the_field: (u32, i32) } + impl A { + fn foo(&self) { + self.<|> + } + } + ", + ), + @r###" + [ + CompletionItem { + label: "foo()", + source_range: [126; 126), + delete: [126; 126), + insert: "foo()$0", + kind: Method, + lookup: "foo", + detail: "fn foo(&self)", + }, + CompletionItem { + label: "the_field", + source_range: [126; 126), + delete: [126; 126), + insert: "the_field", + kind: Field, + detail: "(u32, i32)", + }, + ] + "### + ); + } + + #[test] + fn test_no_struct_field_completion_for_method_call() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A { the_field: u32 } + fn foo(a: A) { + a.<|>() + } + ", + ), + @"[]" + ); + } + + #[test] + fn test_method_completion() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A {} + impl A { + fn the_method(&self) {} + } + fn foo(a: A) { + a.<|> + } 
+ ", + ), + @r###" + [ + CompletionItem { + label: "the_method()", + source_range: [144; 144), + delete: [144; 144), + insert: "the_method()$0", + kind: Method, + lookup: "the_method", + detail: "fn the_method(&self)", + }, + ] + "### + ); + } + + #[test] + fn test_trait_method_completion() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A {} + trait Trait { fn the_method(&self); } + impl Trait for A {} + fn foo(a: A) { + a.<|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_method()", + source_range: [151; 151), + delete: [151; 151), + insert: "the_method()$0", + kind: Method, + lookup: "the_method", + detail: "fn the_method(&self)", + }, + ] + "### + ); + } + + #[test] + fn test_trait_method_completion_deduplicated() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A {} + trait Trait { fn the_method(&self); } + impl Trait for T {} + fn foo(a: &A) { + a.<|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_method()", + source_range: [155; 155), + delete: [155; 155), + insert: "the_method()$0", + kind: Method, + lookup: "the_method", + detail: "fn the_method(&self)", + }, + ] + "### + ); + } + + #[test] + fn test_no_non_self_method() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A {} + impl A { + fn the_method() {} + } + fn foo(a: A) { + a.<|> + } + ", + ), + @"[]" + ); + } + + #[test] + fn test_method_attr_filtering() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A {} + impl A { + #[inline] + fn the_method(&self) { + let x = 1; + let y = 2; + } + } + fn foo(a: A) { + a.<|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "the_method()", + source_range: [249; 249), + delete: [249; 249), + insert: "the_method()$0", + kind: Method, + lookup: "the_method", + detail: "fn the_method(&self)", + }, + ] + "### + ); + } + + #[test] + fn test_tuple_field_completion() { + assert_debug_snapshot!( + do_ref_completion( + r" + fn foo() { + let b = (0, 3.14); + b.<|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "0", + source_range: [75; 75), + delete: [75; 75), + insert: "0", + kind: Field, + detail: "i32", + }, + CompletionItem { + label: "1", + source_range: [75; 75), + delete: [75; 75), + insert: "1", + kind: Field, + detail: "f64", + }, + ] + "### + ); + } + + #[test] + fn test_tuple_field_inference() { + assert_debug_snapshot!( + do_ref_completion( + r" + pub struct S; + impl S { + pub fn blah(&self) {} + } + + struct T(S); + + impl T { + fn foo(&self) { + // FIXME: This doesn't work without the trailing `a` as `0.` is a float + self.0.a<|> + } + } + ", + ), + @r###" + [ + CompletionItem { + label: "blah()", + source_range: [299; 300), + delete: [299; 300), + insert: "blah()$0", + kind: Method, + lookup: "blah", + detail: "pub fn blah(&self)", + }, + ] + "### + ); + } + + #[test] + fn test_completion_works_in_consts() { + assert_debug_snapshot!( + do_ref_completion( + r" + struct A { the_field: u32 } + const X: u32 = { + A { the_field: 92 }.<|> + }; + ", + ), + @r###" + [ + CompletionItem { + label: "the_field", + source_range: [106; 106), + delete: [106; 106), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "### + ); + } + + #[test] + fn test_completion_await_impls_future() { + assert_debug_snapshot!( + do_completion( + r###" + //- /main.rs + use std::future::*; + struct A {} + impl Future for A {} + fn foo(a: A) { + a.<|> + } + + //- /std/lib.rs + pub mod future { + pub trait Future {} + } + "###, CompletionKind::Keyword), + @r###" + [ + CompletionItem { + label: "await", + 
source_range: [74; 74), + delete: [74; 74), + insert: "await", + detail: "expr.await", + }, + ] + "### + ) + } +} diff --git a/crates/ra_ide/src/completion/complete_fn_param.rs b/crates/ra_ide/src/completion/complete_fn_param.rs new file mode 100644 index 000000000..502458706 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_fn_param.rs @@ -0,0 +1,136 @@ +//! FIXME: write short doc here + +use ra_syntax::{ast, match_ast, AstNode}; +use rustc_hash::FxHashMap; + +use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions}; + +/// Complete repeated parameters, both name and type. For example, if all +/// functions in a file have a `spam: &mut Spam` parameter, a completion with +/// `spam: &mut Spam` insert text/label and `spam` lookup string will be +/// suggested. +pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_param { + return; + } + + let mut params = FxHashMap::default(); + for node in ctx.token.parent().ancestors() { + match_ast! { + match node { + ast::SourceFile(it) => { process(it, &mut params) }, + ast::ItemList(it) => { process(it, &mut params) }, + _ => (), + } + } + } + params + .into_iter() + .filter_map(|(label, (count, param))| { + let lookup = param.pat()?.syntax().text().to_string(); + if count < 2 { + None + } else { + Some((label, lookup)) + } + }) + .for_each(|(label, lookup)| { + CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) + .lookup_by(lookup) + .add_to(acc) + }); + + fn process(node: N, params: &mut FxHashMap) { + node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each( + |param| { + let text = param.syntax().text().to_string(); + params.entry(text).or_insert((0, param)).0 += 1; + }, + ) + } +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_magic_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Magic) + } + + #[test] + fn test_param_completion_last_param() { + assert_debug_snapshot!( + do_magic_completion( + r" + fn foo(file_id: FileId) {} + fn bar(file_id: FileId) {} + fn baz(file<|>) {} + ", + ), + @r###" + [ + CompletionItem { + label: "file_id: FileId", + source_range: [110; 114), + delete: [110; 114), + insert: "file_id: FileId", + lookup: "file_id", + }, + ] + "### + ); + } + + #[test] + fn test_param_completion_nth_param() { + assert_debug_snapshot!( + do_magic_completion( + r" + fn foo(file_id: FileId) {} + fn bar(file_id: FileId) {} + fn baz(file<|>, x: i32) {} + ", + ), + @r###" + [ + CompletionItem { + label: "file_id: FileId", + source_range: [110; 114), + delete: [110; 114), + insert: "file_id: FileId", + lookup: "file_id", + }, + ] + "### + ); + } + + #[test] + fn test_param_completion_trait_param() { + assert_debug_snapshot!( + do_magic_completion( + r" + pub(crate) trait SourceRoot { + pub fn contains(&self, file_id: FileId) -> bool; + pub fn module_map(&self) -> &ModuleMap; + pub fn lines(&self, file_id: FileId) -> &LineIndex; + pub fn syntax(&self, file<|>) + } + ", + ), + @r###" + [ + CompletionItem { + label: "file_id: FileId", + source_range: [289; 293), + delete: [289; 293), + insert: "file_id: FileId", + lookup: "file_id", + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs new file mode 100644 index 000000000..eb7cd9ac2 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_keyword.rs @@ -0,0 +1,781 
@@ +//! FIXME: write short doc here + +use ra_syntax::{ + ast::{self, LoopBodyOwner}, + match_ast, AstNode, + SyntaxKind::*, + SyntaxToken, +}; + +use crate::completion::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, +}; + +pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { + // complete keyword "crate" in use stmt + let source_range = ctx.source_range(); + match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { + (Some(_), None) => { + CompletionItem::new(CompletionKind::Keyword, source_range, "crate") + .kind(CompletionItemKind::Keyword) + .insert_text("crate::") + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "self") + .kind(CompletionItemKind::Keyword) + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "super") + .kind(CompletionItemKind::Keyword) + .insert_text("super::") + .add_to(acc); + } + (Some(_), Some(_)) => { + CompletionItem::new(CompletionKind::Keyword, source_range, "self") + .kind(CompletionItemKind::Keyword) + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, source_range, "super") + .kind(CompletionItemKind::Keyword) + .insert_text("super::") + .add_to(acc); + } + _ => {} + } +} + +fn keyword(ctx: &CompletionContext, kw: &str, snippet: &str) -> CompletionItem { + CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), kw) + .kind(CompletionItemKind::Keyword) + .insert_snippet(snippet) + .build() +} + +pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_trivial_path { + return; + } + + let fn_def = match &ctx.function_syntax { + Some(it) => it, + None => return, + }; + acc.add(keyword(ctx, "if", "if $0 {}")); + acc.add(keyword(ctx, "match", "match $0 {}")); + acc.add(keyword(ctx, "while", "while $0 {}")); + acc.add(keyword(ctx, "loop", "loop {$0}")); + + if ctx.after_if { + acc.add(keyword(ctx, "else", "else {$0}")); + acc.add(keyword(ctx, "else if", "else if $0 {}")); + } + if is_in_loop_body(&ctx.token) { + if ctx.can_be_stmt { + acc.add(keyword(ctx, "continue", "continue;")); + acc.add(keyword(ctx, "break", "break;")); + } else { + acc.add(keyword(ctx, "continue", "continue")); + acc.add(keyword(ctx, "break", "break")); + } + } + acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt)); +} + +fn is_in_loop_body(leaf: &SyntaxToken) -> bool { + for node in leaf.parent().ancestors() { + if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { + break; + } + let loop_body = match_ast! 
{ + match node { + ast::ForExpr(it) => { it.loop_body() }, + ast::WhileExpr(it) => { it.loop_body() }, + ast::LoopExpr(it) => { it.loop_body() }, + _ => None, + } + }; + if let Some(body) = loop_body { + if leaf.text_range().is_subrange(&body.syntax().text_range()) { + return true; + } + } + } + false +} + +fn complete_return( + ctx: &CompletionContext, + fn_def: &ast::FnDef, + can_be_stmt: bool, +) -> Option { + let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { + (true, true) => "return $0;", + (true, false) => "return;", + (false, true) => "return $0", + (false, false) => "return", + }; + Some(keyword(ctx, "return", snip)) +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_keyword_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Keyword) + } + + #[test] + fn completes_keywords_in_use_stmt() { + assert_debug_snapshot!( + do_keyword_completion( + r" + use <|> + ", + ), + @r###" + [ + CompletionItem { + label: "crate", + source_range: [21; 21), + delete: [21; 21), + insert: "crate::", + kind: Keyword, + }, + CompletionItem { + label: "self", + source_range: [21; 21), + delete: [21; 21), + insert: "self", + kind: Keyword, + }, + CompletionItem { + label: "super", + source_range: [21; 21), + delete: [21; 21), + insert: "super::", + kind: Keyword, + }, + ] + "### + ); + + assert_debug_snapshot!( + do_keyword_completion( + r" + use a::<|> + ", + ), + @r###" + [ + CompletionItem { + label: "self", + source_range: [24; 24), + delete: [24; 24), + insert: "self", + kind: Keyword, + }, + CompletionItem { + label: "super", + source_range: [24; 24), + delete: [24; 24), + insert: "super::", + kind: Keyword, + }, + ] + "### + ); + + assert_debug_snapshot!( + do_keyword_completion( + r" + use a::{b, <|>} + ", + ), + @r###" + [ + CompletionItem { + label: "self", + source_range: [28; 28), + delete: [28; 28), + insert: "self", + kind: Keyword, + }, + CompletionItem { + label: "super", + source_range: [28; 28), + delete: [28; 28), + insert: "super::", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn completes_various_keywords_in_function() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() { + <|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [49; 49), + delete: [49; 49), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [49; 49), + delete: [49; 49), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [49; 49), + delete: [49; 49), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [49; 49), + delete: [49; 49), + insert: "return;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [49; 49), + delete: [49; 49), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn completes_else_after_if() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() { + if true { + () + } <|> + } + ", + ), + @r###" + [ + CompletionItem { + label: "else", + source_range: [108; 108), + delete: [108; 108), + insert: "else {$0}", + kind: Keyword, + }, + CompletionItem { + label: "else if", + source_range: [108; 108), + delete: [108; 108), + insert: "else if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "if", + source_range: [108; 108), + delete: [108; 108), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + 
label: "loop", + source_range: [108; 108), + delete: [108; 108), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [108; 108), + delete: [108; 108), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [108; 108), + delete: [108; 108), + insert: "return;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [108; 108), + delete: [108; 108), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn test_completion_return_value() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() -> i32 { + <|> + 92 + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [56; 56), + delete: [56; 56), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [56; 56), + delete: [56; 56), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [56; 56), + delete: [56; 56), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [56; 56), + delete: [56; 56), + insert: "return $0;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [56; 56), + delete: [56; 56), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() { + <|> + 92 + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [49; 49), + delete: [49; 49), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [49; 49), + delete: [49; 49), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [49; 49), + delete: [49; 49), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [49; 49), + delete: [49; 49), + insert: "return;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [49; 49), + delete: [49; 49), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn dont_add_semi_after_return_if_not_a_statement() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() -> i32 { + match () { + () => <|> + } + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [97; 97), + delete: [97; 97), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [97; 97), + delete: [97; 97), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [97; 97), + delete: [97; 97), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [97; 97), + delete: [97; 97), + insert: "return $0", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [97; 97), + delete: [97; 97), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn last_return_in_block_has_semi() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() -> i32 { + if condition { + <|> + } + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [95; 95), + delete: [95; 95), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [95; 95), + delete: [95; 95), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [95; 95), + delete: [95; 95), + insert: "match $0 {}", + kind: Keyword, + }, + 
CompletionItem { + label: "return", + source_range: [95; 95), + delete: [95; 95), + insert: "return $0;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [95; 95), + delete: [95; 95), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() -> i32 { + if condition { + <|> + } + let x = 92; + x + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [95; 95), + delete: [95; 95), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [95; 95), + delete: [95; 95), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [95; 95), + delete: [95; 95), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [95; 95), + delete: [95; 95), + insert: "return $0;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [95; 95), + delete: [95; 95), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn completes_break_and_continue_in_loops() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() -> i32 { + loop { <|> } + } + ", + ), + @r###" + [ + CompletionItem { + label: "break", + source_range: [63; 63), + delete: [63; 63), + insert: "break;", + kind: Keyword, + }, + CompletionItem { + label: "continue", + source_range: [63; 63), + delete: [63; 63), + insert: "continue;", + kind: Keyword, + }, + CompletionItem { + label: "if", + source_range: [63; 63), + delete: [63; 63), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [63; 63), + delete: [63; 63), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [63; 63), + delete: [63; 63), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [63; 63), + delete: [63; 63), + insert: "return $0;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [63; 63), + delete: [63; 63), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + + // No completion: lambda isolates control flow + assert_debug_snapshot!( + do_keyword_completion( + r" + fn quux() -> i32 { + loop { || { <|> } } + } + ", + ), + @r###" + [ + CompletionItem { + label: "if", + source_range: [68; 68), + delete: [68; 68), + insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [68; 68), + delete: [68; 68), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [68; 68), + delete: [68; 68), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [68; 68), + delete: [68; 68), + insert: "return $0;", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [68; 68), + delete: [68; 68), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ); + } + + #[test] + fn no_semi_after_break_continue_in_expr() { + assert_debug_snapshot!( + do_keyword_completion( + r" + fn f() { + loop { + match () { + () => br<|> + } + } + } + ", + ), + @r###" + [ + CompletionItem { + label: "break", + source_range: [122; 124), + delete: [122; 124), + insert: "break", + kind: Keyword, + }, + CompletionItem { + label: "continue", + source_range: [122; 124), + delete: [122; 124), + insert: "continue", + kind: Keyword, + }, + CompletionItem { + label: "if", + source_range: [122; 124), + delete: [122; 124), 
+ insert: "if $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "loop", + source_range: [122; 124), + delete: [122; 124), + insert: "loop {$0}", + kind: Keyword, + }, + CompletionItem { + label: "match", + source_range: [122; 124), + delete: [122; 124), + insert: "match $0 {}", + kind: Keyword, + }, + CompletionItem { + label: "return", + source_range: [122; 124), + delete: [122; 124), + insert: "return", + kind: Keyword, + }, + CompletionItem { + label: "while", + source_range: [122; 124), + delete: [122; 124), + insert: "while $0 {}", + kind: Keyword, + }, + ] + "### + ) + } +} diff --git a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs new file mode 100644 index 000000000..faadd1e3f --- /dev/null +++ b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs @@ -0,0 +1,143 @@ +//! FIXME: write short doc here + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { + // Show only macros in top level. + if ctx.is_new_item { + ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { + if let hir::ScopeDef::MacroDef(mac) = res { + acc.add_macro(ctx, Some(name.to_string()), mac); + } + }) + } +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_reference_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn completes_macros_as_item() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + macro_rules! foo { + () => {} + } + + fn foo() {} + + <|> + " + ), + @r###" + [ + CompletionItem { + label: "foo!", + source_range: [46; 46), + delete: [46; 46), + insert: "foo!($0)", + kind: Macro, + detail: "macro_rules! foo", + }, + ] + "### + ); + } + + #[test] + fn completes_vec_macros_with_square_brackets() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + /// Creates a [`Vec`] containing the arguments. + /// + /// - Create a [`Vec`] containing a given list of elements: + /// + /// ``` + /// let v = vec![1, 2, 3]; + /// assert_eq!(v[0], 1); + /// assert_eq!(v[1], 2); + /// assert_eq!(v[2], 3); + /// ``` + macro_rules! vec { + () => {} + } + + fn foo() {} + + <|> + " + ), + @r###" + [ + CompletionItem { + label: "vec!", + source_range: [280; 280), + delete: [280; 280), + insert: "vec![$0]", + kind: Macro, + detail: "macro_rules! vec", + documentation: Documentation( + "Creates a [`Vec`] containing the arguments.\n\n- Create a [`Vec`] containing a given list of elements:\n\n```\nlet v = vec![1, 2, 3];\nassert_eq!(v[0], 1);\nassert_eq!(v[1], 2);\nassert_eq!(v[2], 3);\n```", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_macros_braces_guessing() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + /// Foo + /// + /// Not call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`. + /// Call as `let _=foo! { hello world };` + macro_rules! foo { + () => {} + } + + fn main() { + <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "foo!", + source_range: [163; 163), + delete: [163; 163), + insert: "foo! {$0}", + kind: Macro, + detail: "macro_rules! foo", + documentation: Documentation( + "Foo\n\nNot call `fooo!()` `fooo!()`, or `_foo![]` `_foo![]`.\nCall as `let _=foo! 
{ hello world };`", + ), + }, + CompletionItem { + label: "main()", + source_range: [163; 163), + delete: [163; 163), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs new file mode 100644 index 000000000..89e0009a1 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_path.rs @@ -0,0 +1,785 @@ +//! FIXME: write short doc here + +use hir::{Adt, Either, HasSource, PathResolution}; +use ra_syntax::AstNode; +use test_utils::tested_by; + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { + let path = match &ctx.path_prefix { + Some(path) => path.clone(), + _ => return, + }; + let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) { + Some(PathResolution::Def(def)) => def, + _ => return, + }; + match def { + hir::ModuleDef::Module(module) => { + let module_scope = module.scope(ctx.db); + for (name, def, import) in module_scope { + if let hir::ScopeDef::ModuleDef(hir::ModuleDef::BuiltinType(..)) = def { + if ctx.use_item_syntax.is_some() { + tested_by!(dont_complete_primitive_in_use); + continue; + } + } + if Some(module) == ctx.module { + if let Some(import) = import { + if let Either::A(use_tree) = import.source(ctx.db).value { + if use_tree.syntax().text_range().contains_inclusive(ctx.offset) { + // for `use self::foo<|>`, don't suggest `foo` as a completion + tested_by!(dont_complete_current_use); + continue; + } + } + } + } + acc.add_resolution(ctx, name.to_string(), &def); + } + } + hir::ModuleDef::Adt(_) | hir::ModuleDef::TypeAlias(_) => { + if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { + for variant in e.variants(ctx.db) { + acc.add_enum_variant(ctx, variant); + } + } + let ty = match def { + hir::ModuleDef::Adt(adt) => adt.ty(ctx.db), + hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), + _ => unreachable!(), + }; + ctx.analyzer.iterate_path_candidates(ctx.db, &ty, None, |_ty, item| { + match item { + hir::AssocItem::Function(func) => { + if !func.has_self_param(ctx.db) { + acc.add_function(ctx, func); + } + } + hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + None::<()> + }); + // Iterate assoc types separately + // FIXME: complete T::AssocType + let krate = ctx.module.map(|m| m.krate()); + if let Some(krate) = krate { + ty.iterate_impl_items(ctx.db, krate, |item| { + match item { + hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {} + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + None::<()> + }); + } + } + hir::ModuleDef::Trait(t) => { + for item in t.items(ctx.db) { + match item { + hir::AssocItem::Function(func) => { + if !func.has_self_param(ctx.db) { + acc.add_function(ctx, func); + } + } + hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), + hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), + } + } + } + _ => {} + }; +} + +#[cfg(test)] +mod tests { + use test_utils::covers; + + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_reference_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn dont_complete_current_use() { + covers!(dont_complete_current_use); + let completions = do_completion(r"use self::foo<|>;", CompletionKind::Reference); + assert!(completions.is_empty()); + } + + #[test] + 
fn dont_complete_current_use_in_braces_with_glob() { + let completions = do_completion( + r" + mod foo { pub struct S; } + use self::{foo::*, bar<|>}; + ", + CompletionKind::Reference, + ); + assert_eq!(completions.len(), 2); + } + + #[test] + fn dont_complete_primitive_in_use() { + covers!(dont_complete_primitive_in_use); + let completions = do_completion(r"use self::<|>;", CompletionKind::BuiltinType); + assert!(completions.is_empty()); + } + + #[test] + fn completes_primitives() { + let completions = + do_completion(r"fn main() { let _: <|> = 92; }", CompletionKind::BuiltinType); + assert_eq!(completions.len(), 17); + } + + #[test] + fn completes_mod_with_docs() { + assert_debug_snapshot!( + do_reference_completion( + r" + use self::my<|>; + + /// Some simple + /// docs describing `mod my`. + mod my { + struct Bar; + } + " + ), + @r###" + [ + CompletionItem { + label: "my", + source_range: [27; 29), + delete: [27; 29), + insert: "my", + kind: Module, + documentation: Documentation( + "Some simple\ndocs describing `mod my`.", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_use_item_starting_with_self() { + assert_debug_snapshot!( + do_reference_completion( + r" + use self::m::<|>; + + mod m { + struct Bar; + } + " + ), + @r###" + [ + CompletionItem { + label: "Bar", + source_range: [30; 30), + delete: [30; 30), + insert: "Bar", + kind: Struct, + }, + ] + "### + ); + } + + #[test] + fn completes_use_item_starting_with_crate() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + mod foo; + struct Spam; + //- /foo.rs + use crate::Sp<|> + " + ), + @r###" + [ + CompletionItem { + label: "Spam", + source_range: [11; 13), + delete: [11; 13), + insert: "Spam", + kind: Struct, + }, + CompletionItem { + label: "foo", + source_range: [11; 13), + delete: [11; 13), + insert: "foo", + kind: Module, + }, + ] + "### + ); + } + + #[test] + fn completes_nested_use_tree() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + mod foo; + struct Spam; + //- /foo.rs + use crate::{Sp<|>}; + " + ), + @r###" + [ + CompletionItem { + label: "Spam", + source_range: [12; 14), + delete: [12; 14), + insert: "Spam", + kind: Struct, + }, + CompletionItem { + label: "foo", + source_range: [12; 14), + delete: [12; 14), + insert: "foo", + kind: Module, + }, + ] + "### + ); + } + + #[test] + fn completes_deeply_nested_use_tree() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + mod foo; + pub mod bar { + pub mod baz { + pub struct Spam; + } + } + //- /foo.rs + use crate::{bar::{baz::Sp<|>}}; + " + ), + @r###" + [ + CompletionItem { + label: "Spam", + source_range: [23; 25), + delete: [23; 25), + insert: "Spam", + kind: Struct, + }, + ] + "### + ); + } + + #[test] + fn completes_enum_variant() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + /// An enum + enum E { + /// Foo Variant + Foo, + /// Bar Variant with i32 + Bar(i32) + } + fn foo() { let _ = E::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "Bar", + source_range: [116; 116), + delete: [116; 116), + insert: "Bar", + kind: EnumVariant, + detail: "(i32)", + documentation: Documentation( + "Bar Variant with i32", + ), + }, + CompletionItem { + label: "Foo", + source_range: [116; 116), + delete: [116; 116), + insert: "Foo", + kind: EnumVariant, + detail: "()", + documentation: Documentation( + "Foo Variant", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_enum_variant_with_details() { + assert_debug_snapshot!( + do_reference_completion( + " + //- 
/lib.rs + struct S { field: u32 } + /// An enum + enum E { + /// Foo Variant (empty) + Foo, + /// Bar Variant with i32 and u32 + Bar(i32, u32), + /// + S(S), + } + fn foo() { let _ = E::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "Bar", + source_range: [180; 180), + delete: [180; 180), + insert: "Bar", + kind: EnumVariant, + detail: "(i32, u32)", + documentation: Documentation( + "Bar Variant with i32 and u32", + ), + }, + CompletionItem { + label: "Foo", + source_range: [180; 180), + delete: [180; 180), + insert: "Foo", + kind: EnumVariant, + detail: "()", + documentation: Documentation( + "Foo Variant (empty)", + ), + }, + CompletionItem { + label: "S", + source_range: [180; 180), + delete: [180; 180), + insert: "S", + kind: EnumVariant, + detail: "(S)", + documentation: Documentation( + "", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_struct_associated_method() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + /// A Struct + struct S; + + impl S { + /// An associated method + fn m() { } + } + + fn foo() { let _ = S::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "m()", + source_range: [100; 100), + delete: [100; 100), + insert: "m()$0", + kind: Function, + lookup: "m", + detail: "fn m()", + documentation: Documentation( + "An associated method", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_struct_associated_const() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + /// A Struct + struct S; + + impl S { + /// An associated const + const C: i32 = 42; + } + + fn foo() { let _ = S::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "C", + source_range: [107; 107), + delete: [107; 107), + insert: "C", + kind: Const, + detail: "const C: i32 = 42;", + documentation: Documentation( + "An associated const", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_struct_associated_type() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + /// A Struct + struct S; + + impl S { + /// An associated type + type T = i32; + } + + fn foo() { let _ = S::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "T", + source_range: [101; 101), + delete: [101; 101), + insert: "T", + kind: TypeAlias, + detail: "type T = i32;", + documentation: Documentation( + "An associated type", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_enum_associated_method() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + /// An enum + enum S {}; + + impl S { + /// An associated method + fn m() { } + } + + fn foo() { let _ = S::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "m()", + source_range: [100; 100), + delete: [100; 100), + insert: "m()$0", + kind: Function, + lookup: "m", + detail: "fn m()", + documentation: Documentation( + "An associated method", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_union_associated_method() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + /// A union + union U {}; + + impl U { + /// An associated method + fn m() { } + } + + fn foo() { let _ = U::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "m()", + source_range: [101; 101), + delete: [101; 101), + insert: "m()$0", + kind: Function, + lookup: "m", + detail: "fn m()", + documentation: Documentation( + "An associated method", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_use_paths_across_crates() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + use foo::<|>; + + //- /foo/lib.rs + pub mod bar { + 
pub struct S; + } + " + ), + @r###" + [ + CompletionItem { + label: "bar", + source_range: [9; 9), + delete: [9; 9), + insert: "bar", + kind: Module, + }, + ] + "### + ); + } + + #[test] + fn completes_trait_associated_method_1() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + trait Trait { + /// A trait method + fn m(); + } + + fn foo() { let _ = Trait::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "m()", + source_range: [73; 73), + delete: [73; 73), + insert: "m()$0", + kind: Function, + lookup: "m", + detail: "fn m()", + documentation: Documentation( + "A trait method", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_trait_associated_method_2() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + trait Trait { + /// A trait method + fn m(); + } + + struct S; + impl Trait for S {} + + fn foo() { let _ = S::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "m()", + source_range: [99; 99), + delete: [99; 99), + insert: "m()$0", + kind: Function, + lookup: "m", + detail: "fn m()", + documentation: Documentation( + "A trait method", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_trait_associated_method_3() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + trait Trait { + /// A trait method + fn m(); + } + + struct S; + impl Trait for S {} + + fn foo() { let _ = ::<|> } + " + ), + @r###" + [ + CompletionItem { + label: "m()", + source_range: [110; 110), + delete: [110; 110), + insert: "m()$0", + kind: Function, + lookup: "m", + detail: "fn m()", + documentation: Documentation( + "A trait method", + ), + }, + ] + "### + ); + } + + #[test] + fn completes_type_alias() { + assert_debug_snapshot!( + do_reference_completion( + " + struct S; + impl S { fn foo() {} } + type T = S; + impl T { fn bar() {} } + + fn main() { + T::<|>; + } + " + ), + @r###" + [ + CompletionItem { + label: "bar()", + source_range: [185; 185), + delete: [185; 185), + insert: "bar()$0", + kind: Function, + lookup: "bar", + detail: "fn bar()", + }, + CompletionItem { + label: "foo()", + source_range: [185; 185), + delete: [185; 185), + insert: "foo()$0", + kind: Function, + lookup: "foo", + detail: "fn foo()", + }, + ] + "### + ); + } + + #[test] + fn completes_qualified_macros() { + assert_debug_snapshot!( + do_reference_completion( + " + #[macro_export] + macro_rules! foo { + () => {} + } + + fn main() { + let _ = crate::<|> + } + " + ), + @r###" + [ + CompletionItem { + label: "foo!", + source_range: [179; 179), + delete: [179; 179), + insert: "foo!($0)", + kind: Macro, + detail: "#[macro_export]\nmacro_rules! foo", + }, + CompletionItem { + label: "main()", + source_range: [179; 179), + delete: [179; 179), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs new file mode 100644 index 000000000..fd03b1c40 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_pattern.rs @@ -0,0 +1,89 @@ +//! FIXME: write short doc here + +use crate::completion::{CompletionContext, Completions}; + +/// Completes constats and paths in patterns. 
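+/// For example (an illustrative note mirroring the test below): in
+/// `match E::X { <|> }`, this pass suggests enum variants, constants and
+/// modules that are in scope, but not structs or statics.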
+pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_pat_binding { + return; + } + // FIXME: ideally, we should look at the type we are matching against and + // suggest variants + auto-imports + ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { + let def = match &res { + hir::ScopeDef::ModuleDef(def) => def, + _ => return, + }; + match def { + hir::ModuleDef::Adt(hir::Adt::Enum(..)) + | hir::ModuleDef::EnumVariant(..) + | hir::ModuleDef::Const(..) + | hir::ModuleDef::Module(..) => (), + _ => return, + } + acc.add_resolution(ctx, name.to_string(), &res) + }); +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn complete(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn completes_enum_variants_and_modules() { + let completions = complete( + r" + enum E { X } + use self::E::X; + const Z: E = E::X; + mod m {} + + static FOO: E = E::X; + struct Bar { f: u32 } + + fn foo() { + match E::X { + <|> + } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "E", + source_range: [246; 246), + delete: [246; 246), + insert: "E", + kind: Enum, + }, + CompletionItem { + label: "X", + source_range: [246; 246), + delete: [246; 246), + insert: "X", + kind: EnumVariant, + }, + CompletionItem { + label: "Z", + source_range: [246; 246), + delete: [246; 246), + insert: "Z", + kind: Const, + }, + CompletionItem { + label: "m", + source_range: [246; 246), + delete: [246; 246), + insert: "m", + kind: Module, + }, + ] + "###); + } +} diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs new file mode 100644 index 000000000..646a30c76 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_postfix.rs @@ -0,0 +1,282 @@ +//! 
FIXME: write short doc here + +use ra_syntax::{ast::AstNode, TextRange, TextUnit}; +use ra_text_edit::TextEdit; + +use crate::{ + completion::{ + completion_context::CompletionContext, + completion_item::{Builder, CompletionKind, Completions}, + }, + CompletionItem, +}; + +pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { + if ctx.db.feature_flags.get("completion.enable-postfix") == false { + return; + } + + let dot_receiver = match &ctx.dot_receiver { + Some(it) => it, + None => return, + }; + + let receiver_text = if ctx.dot_receiver_is_ambiguous_float_literal { + let text = dot_receiver.syntax().text(); + let without_dot = ..text.len() - TextUnit::of_char('.'); + text.slice(without_dot).to_string() + } else { + dot_receiver.syntax().text().to_string() + }; + + let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { + Some(it) => it, + None => return, + }; + + if receiver_ty.is_bool() || receiver_ty.is_unknown() { + postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text)) + .add_to(acc); + postfix_snippet( + ctx, + "while", + "while expr {}", + &format!("while {} {{\n$0\n}}", receiver_text), + ) + .add_to(acc); + } + + postfix_snippet(ctx, "not", "!expr", &format!("!{}", receiver_text)).add_to(acc); + + postfix_snippet(ctx, "ref", "&expr", &format!("&{}", receiver_text)).add_to(acc); + postfix_snippet(ctx, "refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc); + + postfix_snippet( + ctx, + "match", + "match expr {}", + &format!("match {} {{\n ${{1:_}} => {{$0\\}},\n}}", receiver_text), + ) + .add_to(acc); + + postfix_snippet(ctx, "dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); + + postfix_snippet(ctx, "box", "Box::new(expr)", &format!("Box::new({})", receiver_text)) + .add_to(acc); +} + +fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { + let edit = { + let receiver_range = + ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range(); + let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); + TextEdit::replace(delete_range, snippet.to_string()) + }; + CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label) + .detail(detail) + .snippet_edit(edit) +} + +#[cfg(test)] +mod tests { + use insta::assert_debug_snapshot; + + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + + fn do_postfix_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Postfix) + } + + #[test] + fn postfix_completion_works_for_trivial_path_expression() { + assert_debug_snapshot!( + do_postfix_completion( + r#" + fn main() { + let bar = true; + bar.<|> + } + "#, + ), + @r###" + [ + CompletionItem { + label: "box", + source_range: [89; 89), + delete: [85; 89), + insert: "Box::new(bar)", + detail: "Box::new(expr)", + }, + CompletionItem { + label: "dbg", + source_range: [89; 89), + delete: [85; 89), + insert: "dbg!(bar)", + detail: "dbg!(expr)", + }, + CompletionItem { + label: "if", + source_range: [89; 89), + delete: [85; 89), + insert: "if bar {$0}", + detail: "if expr {}", + }, + CompletionItem { + label: "match", + source_range: [89; 89), + delete: [85; 89), + insert: "match bar {\n ${1:_} => {$0\\},\n}", + detail: "match expr {}", + }, + CompletionItem { + label: "not", + source_range: [89; 89), + delete: [85; 89), + insert: "!bar", + detail: "!expr", + }, + CompletionItem { + label: "ref", + source_range: [89; 89), + delete: [85; 89), + insert: "&bar", + 
detail: "&expr", + }, + CompletionItem { + label: "refm", + source_range: [89; 89), + delete: [85; 89), + insert: "&mut bar", + detail: "&mut expr", + }, + CompletionItem { + label: "while", + source_range: [89; 89), + delete: [85; 89), + insert: "while bar {\n$0\n}", + detail: "while expr {}", + }, + ] + "### + ); + } + + #[test] + fn some_postfix_completions_ignored() { + assert_debug_snapshot!( + do_postfix_completion( + r#" + fn main() { + let bar: u8 = 12; + bar.<|> + } + "#, + ), + @r###" + [ + CompletionItem { + label: "box", + source_range: [91; 91), + delete: [87; 91), + insert: "Box::new(bar)", + detail: "Box::new(expr)", + }, + CompletionItem { + label: "dbg", + source_range: [91; 91), + delete: [87; 91), + insert: "dbg!(bar)", + detail: "dbg!(expr)", + }, + CompletionItem { + label: "match", + source_range: [91; 91), + delete: [87; 91), + insert: "match bar {\n ${1:_} => {$0\\},\n}", + detail: "match expr {}", + }, + CompletionItem { + label: "not", + source_range: [91; 91), + delete: [87; 91), + insert: "!bar", + detail: "!expr", + }, + CompletionItem { + label: "ref", + source_range: [91; 91), + delete: [87; 91), + insert: "&bar", + detail: "&expr", + }, + CompletionItem { + label: "refm", + source_range: [91; 91), + delete: [87; 91), + insert: "&mut bar", + detail: "&mut expr", + }, + ] + "### + ); + } + + #[test] + fn postfix_completion_works_for_ambiguous_float_literal() { + assert_debug_snapshot!( + do_postfix_completion( + r#" + fn main() { + 42.<|> + } + "#, + ), + @r###" + [ + CompletionItem { + label: "box", + source_range: [52; 52), + delete: [49; 52), + insert: "Box::new(42)", + detail: "Box::new(expr)", + }, + CompletionItem { + label: "dbg", + source_range: [52; 52), + delete: [49; 52), + insert: "dbg!(42)", + detail: "dbg!(expr)", + }, + CompletionItem { + label: "match", + source_range: [52; 52), + delete: [49; 52), + insert: "match 42 {\n ${1:_} => {$0\\},\n}", + detail: "match expr {}", + }, + CompletionItem { + label: "not", + source_range: [52; 52), + delete: [49; 52), + insert: "!42", + detail: "!expr", + }, + CompletionItem { + label: "ref", + source_range: [52; 52), + delete: [49; 52), + insert: "&42", + detail: "&expr", + }, + CompletionItem { + label: "refm", + source_range: [52; 52), + delete: [49; 52), + insert: "&mut 42", + detail: "&mut expr", + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs new file mode 100644 index 000000000..577c394d2 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_record_literal.rs @@ -0,0 +1,159 @@ +//! FIXME: write short doc here + +use crate::completion::{CompletionContext, Completions}; + +/// Complete fields in fields literals. 
+pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { + let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| { + Some(( + ctx.analyzer.type_of(ctx.db, &it.clone().into())?, + ctx.analyzer.resolve_record_literal(it)?, + )) + }) { + Some(it) => it, + _ => return, + }; + + for (field, field_ty) in ty.variant_fields(ctx.db, variant) { + acc.add_field(ctx, field, &field_ty); + } +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn complete(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn test_record_literal_deprecated_field() { + let completions = complete( + r" + struct A { + #[deprecated] + the_field: u32, + } + fn foo() { + A { the<|> } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "the_field", + source_range: [142; 145), + delete: [142; 145), + insert: "the_field", + kind: Field, + detail: "u32", + deprecated: true, + }, + ] + "###); + } + + #[test] + fn test_record_literal_field() { + let completions = complete( + r" + struct A { the_field: u32 } + fn foo() { + A { the<|> } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "the_field", + source_range: [83; 86), + delete: [83; 86), + insert: "the_field", + kind: Field, + detail: "u32", + }, + ] + "###); + } + + #[test] + fn test_record_literal_enum_variant() { + let completions = complete( + r" + enum E { + A { a: u32 } + } + fn foo() { + let _ = E::A { <|> } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "a", + source_range: [119; 119), + delete: [119; 119), + insert: "a", + kind: Field, + detail: "u32", + }, + ] + "###); + } + + #[test] + fn test_record_literal_two_structs() { + let completions = complete( + r" + struct A { a: u32 } + struct B { b: u32 } + + fn foo() { + let _: A = B { <|> } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "b", + source_range: [119; 119), + delete: [119; 119), + insert: "b", + kind: Field, + detail: "u32", + }, + ] + "###); + } + + #[test] + fn test_record_literal_generic_struct() { + let completions = complete( + r" + struct A { a: T } + + fn foo() { + let _: A = A { <|> } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "a", + source_range: [93; 93), + delete: [93; 93), + insert: "a", + kind: Field, + detail: "u32", + }, + ] + "###); + } +} diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs new file mode 100644 index 000000000..a56c7e3a1 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_record_pattern.rs @@ -0,0 +1,93 @@ +//! 
FIXME: write short doc here + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) { + let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| { + Some(( + ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?, + ctx.analyzer.resolve_record_pattern(it)?, + )) + }) { + Some(it) => it, + _ => return, + }; + + for (field, field_ty) in ty.variant_fields(ctx.db, variant) { + acc.add_field(ctx, field, &field_ty); + } +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn complete(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn test_record_pattern_field() { + let completions = complete( + r" + struct S { foo: u32 } + + fn process(f: S) { + match f { + S { f<|>: 92 } => (), + } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "foo", + source_range: [117; 118), + delete: [117; 118), + insert: "foo", + kind: Field, + detail: "u32", + }, + ] + "###); + } + + #[test] + fn test_record_pattern_enum_variant() { + let completions = complete( + r" + enum E { + S { foo: u32, bar: () } + } + + fn process(e: E) { + match e { + E::S { <|> } => (), + } + } + ", + ); + assert_debug_snapshot!(completions, @r###" + [ + CompletionItem { + label: "bar", + source_range: [161; 161), + delete: [161; 161), + insert: "bar", + kind: Field, + detail: "()", + }, + CompletionItem { + label: "foo", + source_range: [161; 161), + delete: [161; 161), + insert: "foo", + kind: Field, + detail: "u32", + }, + ] + "###); + } +} diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs new file mode 100644 index 000000000..d5739b58a --- /dev/null +++ b/crates/ra_ide/src/completion/complete_scope.rs @@ -0,0 +1,876 @@ +//! FIXME: write short doc here + +use ra_assists::auto_import_text_edit; +use ra_syntax::{ast, AstNode, SmolStr}; +use ra_text_edit::TextEditBuilder; +use rustc_hash::FxHashMap; + +use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions}; + +pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_trivial_path { + return; + } + + ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { + acc.add_resolution(ctx, name.to_string(), &res) + }); + + // auto-import + // We fetch ident from the original file, because we need to pre-filter auto-imports + if ast::NameRef::cast(ctx.token.parent()).is_some() { + let import_resolver = ImportResolver::new(); + let import_names = import_resolver.all_names(ctx.token.text()); + import_names.into_iter().for_each(|(name, path)| { + let edit = { + let mut builder = TextEditBuilder::default(); + builder.replace(ctx.source_range(), name.to_string()); + auto_import_text_edit( + &ctx.token.parent(), + &ctx.token.parent(), + &path, + &mut builder, + ); + builder.finish() + }; + + // Hack: copied this check form conv.rs beacause auto import can produce edits + // that invalidate assert in conv_with. 
+ if edit + .as_atoms() + .iter() + .filter(|atom| !ctx.source_range().is_subrange(&atom.delete)) + .all(|atom| ctx.source_range().intersection(&atom.delete).is_none()) + { + CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + build_import_label(&name, &path), + ) + .text_edit(edit) + .add_to(acc); + } + }); + } +} + +fn build_import_label(name: &str, path: &[SmolStr]) -> String { + let mut buf = String::with_capacity(64); + buf.push_str(name); + buf.push_str(" ("); + fmt_import_path(path, &mut buf); + buf.push_str(")"); + buf +} + +fn fmt_import_path(path: &[SmolStr], buf: &mut String) { + let mut segments = path.iter(); + if let Some(s) = segments.next() { + buf.push_str(&s); + } + for s in segments { + buf.push_str("::"); + buf.push_str(&s); + } +} + +#[derive(Debug, Clone, Default)] +pub(crate) struct ImportResolver { + // todo: use fst crate or something like that + dummy_names: Vec<(SmolStr, Vec)>, +} + +impl ImportResolver { + pub(crate) fn new() -> Self { + let dummy_names = vec![ + (SmolStr::new("fmt"), vec![SmolStr::new("std"), SmolStr::new("fmt")]), + (SmolStr::new("io"), vec![SmolStr::new("std"), SmolStr::new("io")]), + (SmolStr::new("iter"), vec![SmolStr::new("std"), SmolStr::new("iter")]), + (SmolStr::new("hash"), vec![SmolStr::new("std"), SmolStr::new("hash")]), + ( + SmolStr::new("Debug"), + vec![SmolStr::new("std"), SmolStr::new("fmt"), SmolStr::new("Debug")], + ), + ( + SmolStr::new("Display"), + vec![SmolStr::new("std"), SmolStr::new("fmt"), SmolStr::new("Display")], + ), + ( + SmolStr::new("Hash"), + vec![SmolStr::new("std"), SmolStr::new("hash"), SmolStr::new("Hash")], + ), + ( + SmolStr::new("Hasher"), + vec![SmolStr::new("std"), SmolStr::new("hash"), SmolStr::new("Hasher")], + ), + ( + SmolStr::new("Iterator"), + vec![SmolStr::new("std"), SmolStr::new("iter"), SmolStr::new("Iterator")], + ), + ]; + + ImportResolver { dummy_names } + } + + // Returns a map of importable items filtered by name. + // The map associates item name with its full path. 
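+    // Illustrative behaviour, assuming the dummy table above: a query like
+    // `all_names("Has")` yields both `Hash` (std::hash::Hash) and `Hasher`
+    // (std::hash::Hasher), while single-character queries such as "H"
+    // return an empty map.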
+ // todo: should return Resolutions + pub(crate) fn all_names(&self, name: &str) -> FxHashMap> { + if name.len() > 1 { + self.dummy_names.iter().filter(|(n, _)| n.contains(name)).cloned().collect() + } else { + FxHashMap::default() + } + } +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_reference_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn completes_bindings_from_let() { + assert_debug_snapshot!( + do_reference_completion( + r" + fn quux(x: i32) { + let y = 92; + 1 + <|>; + let z = (); + } + " + ), + @r###" + [ + CompletionItem { + label: "quux(…)", + source_range: [91; 91), + delete: [91; 91), + insert: "quux($0)", + kind: Function, + lookup: "quux", + detail: "fn quux(x: i32)", + }, + CompletionItem { + label: "x", + source_range: [91; 91), + delete: [91; 91), + insert: "x", + kind: Binding, + detail: "i32", + }, + CompletionItem { + label: "y", + source_range: [91; 91), + delete: [91; 91), + insert: "y", + kind: Binding, + detail: "i32", + }, + ] + "### + ); + } + + #[test] + fn completes_bindings_from_if_let() { + assert_debug_snapshot!( + do_reference_completion( + r" + fn quux() { + if let Some(x) = foo() { + let y = 92; + }; + if let Some(a) = bar() { + let b = 62; + 1 + <|> + } + } + " + ), + @r###" + [ + CompletionItem { + label: "a", + source_range: [242; 242), + delete: [242; 242), + insert: "a", + kind: Binding, + }, + CompletionItem { + label: "b", + source_range: [242; 242), + delete: [242; 242), + insert: "b", + kind: Binding, + detail: "i32", + }, + CompletionItem { + label: "quux()", + source_range: [242; 242), + delete: [242; 242), + insert: "quux()$0", + kind: Function, + lookup: "quux", + detail: "fn quux()", + }, + ] + "### + ); + } + + #[test] + fn completes_bindings_from_for() { + assert_debug_snapshot!( + do_reference_completion( + r" + fn quux() { + for x in &[1, 2, 3] { + <|> + } + } + " + ), + @r###" + [ + CompletionItem { + label: "quux()", + source_range: [95; 95), + delete: [95; 95), + insert: "quux()$0", + kind: Function, + lookup: "quux", + detail: "fn quux()", + }, + CompletionItem { + label: "x", + source_range: [95; 95), + delete: [95; 95), + insert: "x", + kind: Binding, + }, + ] + "### + ); + } + + #[test] + fn completes_generic_params() { + assert_debug_snapshot!( + do_reference_completion( + r" + fn quux() { + <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "T", + source_range: [52; 52), + delete: [52; 52), + insert: "T", + kind: TypeParam, + }, + CompletionItem { + label: "quux()", + source_range: [52; 52), + delete: [52; 52), + insert: "quux()$0", + kind: Function, + lookup: "quux", + detail: "fn quux()", + }, + ] + "### + ); + } + + #[test] + fn completes_generic_params_in_struct() { + assert_debug_snapshot!( + do_reference_completion( + r" + struct X { + x: <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "Self", + source_range: [54; 54), + delete: [54; 54), + insert: "Self", + kind: TypeParam, + }, + CompletionItem { + label: "T", + source_range: [54; 54), + delete: [54; 54), + insert: "T", + kind: TypeParam, + }, + CompletionItem { + label: "X<…>", + source_range: [54; 54), + delete: [54; 54), + insert: "X<$0>", + kind: Struct, + lookup: "X", + }, + ] + "### + ); + } + + #[test] + fn completes_self_in_enum() { + assert_debug_snapshot!( + do_reference_completion( + r" + enum X { + Y(<|>) + } + " + ), + @r###" + [ + CompletionItem { + label: "Self", + source_range: 
[48; 48), + delete: [48; 48), + insert: "Self", + kind: TypeParam, + }, + CompletionItem { + label: "X", + source_range: [48; 48), + delete: [48; 48), + insert: "X", + kind: Enum, + }, + ] + "### + ); + } + + #[test] + fn completes_module_items() { + assert_debug_snapshot!( + do_reference_completion( + r" + struct Foo; + enum Baz {} + fn quux() { + <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "Baz", + source_range: [105; 105), + delete: [105; 105), + insert: "Baz", + kind: Enum, + }, + CompletionItem { + label: "Foo", + source_range: [105; 105), + delete: [105; 105), + insert: "Foo", + kind: Struct, + }, + CompletionItem { + label: "quux()", + source_range: [105; 105), + delete: [105; 105), + insert: "quux()$0", + kind: Function, + lookup: "quux", + detail: "fn quux()", + }, + ] + "### + ); + } + + #[test] + fn completes_extern_prelude() { + assert_debug_snapshot!( + do_reference_completion( + r" + //- /lib.rs + use <|>; + + //- /other_crate/lib.rs + // nothing here + " + ), + @r###" + [ + CompletionItem { + label: "other_crate", + source_range: [4; 4), + delete: [4; 4), + insert: "other_crate", + kind: Module, + }, + ] + "### + ); + } + + #[test] + fn completes_module_items_in_nested_modules() { + assert_debug_snapshot!( + do_reference_completion( + r" + struct Foo; + mod m { + struct Bar; + fn quux() { <|> } + } + " + ), + @r###" + [ + CompletionItem { + label: "Bar", + source_range: [117; 117), + delete: [117; 117), + insert: "Bar", + kind: Struct, + }, + CompletionItem { + label: "quux()", + source_range: [117; 117), + delete: [117; 117), + insert: "quux()$0", + kind: Function, + lookup: "quux", + detail: "fn quux()", + }, + ] + "### + ); + } + + #[test] + fn completes_return_type() { + assert_debug_snapshot!( + do_reference_completion( + r" + struct Foo; + fn x() -> <|> + " + ), + @r###" + [ + CompletionItem { + label: "Foo", + source_range: [55; 55), + delete: [55; 55), + insert: "Foo", + kind: Struct, + }, + CompletionItem { + label: "x()", + source_range: [55; 55), + delete: [55; 55), + insert: "x()$0", + kind: Function, + lookup: "x", + detail: "fn x()", + }, + ] + "### + ); + } + + #[test] + fn dont_show_both_completions_for_shadowing() { + assert_debug_snapshot!( + do_reference_completion( + r" + fn foo() { + let bar = 92; + { + let bar = 62; + <|> + } + } + " + ), + @r###" + [ + CompletionItem { + label: "bar", + source_range: [146; 146), + delete: [146; 146), + insert: "bar", + kind: Binding, + detail: "i32", + }, + CompletionItem { + label: "foo()", + source_range: [146; 146), + delete: [146; 146), + insert: "foo()$0", + kind: Function, + lookup: "foo", + detail: "fn foo()", + }, + ] + "### + ); + } + + #[test] + fn completes_self_in_methods() { + assert_debug_snapshot!( + do_reference_completion(r"impl S { fn foo(&self) { <|> } }"), + @r###" + [ + CompletionItem { + label: "Self", + source_range: [25; 25), + delete: [25; 25), + insert: "Self", + kind: TypeParam, + }, + CompletionItem { + label: "self", + source_range: [25; 25), + delete: [25; 25), + insert: "self", + kind: Binding, + detail: "&{unknown}", + }, + ] + "### + ); + } + + #[test] + fn completes_prelude() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + fn foo() { let x: <|> } + + //- /std/lib.rs + #[prelude_import] + use prelude::*; + + mod prelude { + struct Option; + } + " + ), + @r###" + [ + CompletionItem { + label: "Option", + source_range: [18; 18), + delete: [18; 18), + insert: "Option", + kind: Struct, + }, + CompletionItem { + label: "foo()", + source_range: [18; 
18), + delete: [18; 18), + insert: "foo()$0", + kind: Function, + lookup: "foo", + detail: "fn foo()", + }, + CompletionItem { + label: "std", + source_range: [18; 18), + delete: [18; 18), + insert: "std", + kind: Module, + }, + ] + "### + ); + } + + #[test] + fn completes_std_prelude_if_core_is_defined() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + fn foo() { let x: <|> } + + //- /core/lib.rs + #[prelude_import] + use prelude::*; + + mod prelude { + struct Option; + } + + //- /std/lib.rs + #[prelude_import] + use prelude::*; + + mod prelude { + struct String; + } + " + ), + @r###" + [ + CompletionItem { + label: "String", + source_range: [18; 18), + delete: [18; 18), + insert: "String", + kind: Struct, + }, + CompletionItem { + label: "core", + source_range: [18; 18), + delete: [18; 18), + insert: "core", + kind: Module, + }, + CompletionItem { + label: "foo()", + source_range: [18; 18), + delete: [18; 18), + insert: "foo()$0", + kind: Function, + lookup: "foo", + detail: "fn foo()", + }, + CompletionItem { + label: "std", + source_range: [18; 18), + delete: [18; 18), + insert: "std", + kind: Module, + }, + ] + "### + ); + } + + #[test] + fn completes_macros_as_value() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + macro_rules! foo { + () => {} + } + + #[macro_use] + mod m1 { + macro_rules! bar { + () => {} + } + } + + mod m2 { + macro_rules! nope { + () => {} + } + + #[macro_export] + macro_rules! baz { + () => {} + } + } + + fn main() { + let v = <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "bar!", + source_range: [252; 252), + delete: [252; 252), + insert: "bar!($0)", + kind: Macro, + detail: "macro_rules! bar", + }, + CompletionItem { + label: "baz!", + source_range: [252; 252), + delete: [252; 252), + insert: "baz!($0)", + kind: Macro, + detail: "#[macro_export]\nmacro_rules! baz", + }, + CompletionItem { + label: "foo!", + source_range: [252; 252), + delete: [252; 252), + insert: "foo!($0)", + kind: Macro, + detail: "macro_rules! foo", + }, + CompletionItem { + label: "m1", + source_range: [252; 252), + delete: [252; 252), + insert: "m1", + kind: Module, + }, + CompletionItem { + label: "m2", + source_range: [252; 252), + delete: [252; 252), + insert: "m2", + kind: Module, + }, + CompletionItem { + label: "main()", + source_range: [252; 252), + delete: [252; 252), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + ] + "### + ); + } + + #[test] + fn completes_both_macro_and_value() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + macro_rules! foo { + () => {} + } + + fn foo() { + <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "foo!", + source_range: [49; 49), + delete: [49; 49), + insert: "foo!($0)", + kind: Macro, + detail: "macro_rules! foo", + }, + CompletionItem { + label: "foo()", + source_range: [49; 49), + delete: [49; 49), + insert: "foo()$0", + kind: Function, + lookup: "foo", + detail: "fn foo()", + }, + ] + "### + ); + } + + #[test] + fn completes_macros_as_type() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + macro_rules! foo { + () => {} + } + + fn main() { + let x: <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "foo!", + source_range: [57; 57), + delete: [57; 57), + insert: "foo!($0)", + kind: Macro, + detail: "macro_rules! 
foo", + }, + CompletionItem { + label: "main()", + source_range: [57; 57), + delete: [57; 57), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + ] + "### + ); + } + + #[test] + fn completes_macros_as_stmt() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /main.rs + macro_rules! foo { + () => {} + } + + fn main() { + <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "foo!", + source_range: [50; 50), + delete: [50; 50), + insert: "foo!($0)", + kind: Macro, + detail: "macro_rules! foo", + }, + CompletionItem { + label: "main()", + source_range: [50; 50), + delete: [50; 50), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/completion/complete_snippet.rs b/crates/ra_ide/src/completion/complete_snippet.rs new file mode 100644 index 000000000..1f2988b36 --- /dev/null +++ b/crates/ra_ide/src/completion/complete_snippet.rs @@ -0,0 +1,120 @@ +//! FIXME: write short doc here + +use crate::completion::{ + completion_item::Builder, CompletionContext, CompletionItem, CompletionItemKind, + CompletionKind, Completions, +}; + +fn snippet(ctx: &CompletionContext, label: &str, snippet: &str) -> Builder { + CompletionItem::new(CompletionKind::Snippet, ctx.source_range(), label) + .insert_snippet(snippet) + .kind(CompletionItemKind::Snippet) +} + +pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { + return; + } + + snippet(ctx, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); + snippet(ctx, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); +} + +pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_new_item { + return; + } + snippet( + ctx, + "Test function", + "\ +#[test] +fn ${1:feature}() { + $0 +}", + ) + .lookup_by("tfn") + .add_to(acc); + + snippet(ctx, "pub(crate)", "pub(crate) $0").add_to(acc); +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot; + + fn do_snippet_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Snippet) + } + + #[test] + fn completes_snippets_in_expressions() { + assert_debug_snapshot!( + do_snippet_completion(r"fn foo(x: i32) { <|> }"), + @r###" + [ + CompletionItem { + label: "pd", + source_range: [17; 17), + delete: [17; 17), + insert: "eprintln!(\"$0 = {:?}\", $0);", + kind: Snippet, + }, + CompletionItem { + label: "ppd", + source_range: [17; 17), + delete: [17; 17), + insert: "eprintln!(\"$0 = {:#?}\", $0);", + kind: Snippet, + }, + ] + "### + ); + } + + #[test] + fn should_not_complete_snippets_in_path() { + assert_debug_snapshot!( + do_snippet_completion(r"fn foo(x: i32) { ::foo<|> }"), + @"[]" + ); + assert_debug_snapshot!( + do_snippet_completion(r"fn foo(x: i32) { ::<|> }"), + @"[]" + ); + } + + #[test] + fn completes_snippets_in_items() { + assert_debug_snapshot!( + do_snippet_completion( + r" + #[cfg(test)] + mod tests { + <|> + } + " + ), + @r###" + [ + CompletionItem { + label: "Test function", + source_range: [78; 78), + delete: [78; 78), + insert: "#[test]\nfn ${1:feature}() {\n $0\n}", + kind: Snippet, + lookup: "tfn", + }, + CompletionItem { + label: "pub(crate)", + source_range: [78; 78), + delete: [78; 78), + insert: "pub(crate) $0", + kind: Snippet, + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/completion/completion_context.rs 
b/crates/ra_ide/src/completion/completion_context.rs new file mode 100644 index 000000000..b8345c91d --- /dev/null +++ b/crates/ra_ide/src/completion/completion_context.rs @@ -0,0 +1,274 @@ +//! FIXME: write short doc here + +use ra_syntax::{ + algo::{find_covering_element, find_node_at_offset}, + ast, AstNode, Parse, SourceFile, + SyntaxKind::*, + SyntaxNode, SyntaxToken, TextRange, TextUnit, +}; +use ra_text_edit::AtomTextEdit; + +use crate::{db, FilePosition}; + +/// `CompletionContext` is created early during completion to figure out, where +/// exactly is the cursor, syntax-wise. +#[derive(Debug)] +pub(crate) struct CompletionContext<'a> { + pub(super) db: &'a db::RootDatabase, + pub(super) analyzer: hir::SourceAnalyzer, + pub(super) offset: TextUnit, + pub(super) token: SyntaxToken, + pub(super) module: Option, + pub(super) function_syntax: Option, + pub(super) use_item_syntax: Option, + pub(super) record_lit_syntax: Option, + pub(super) record_lit_pat: Option, + pub(super) is_param: bool, + /// If a name-binding or reference to a const in a pattern. + /// Irrefutable patterns (like let) are excluded. + pub(super) is_pat_binding: bool, + /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path. + pub(super) is_trivial_path: bool, + /// If not a trivial path, the prefix (qualifier). + pub(super) path_prefix: Option, + pub(super) after_if: bool, + /// `true` if we are a statement or a last expr in the block. + pub(super) can_be_stmt: bool, + /// Something is typed at the "top" level, in module or impl/trait. + pub(super) is_new_item: bool, + /// The receiver if this is a field or method access, i.e. writing something.<|> + pub(super) dot_receiver: Option, + pub(super) dot_receiver_is_ambiguous_float_literal: bool, + /// If this is a call (method or function) in particular, i.e. the () are already there. + pub(super) is_call: bool, + pub(super) is_path_type: bool, + pub(super) has_type_args: bool, +} + +impl<'a> CompletionContext<'a> { + pub(super) fn new( + db: &'a db::RootDatabase, + original_parse: &'a Parse, + position: FilePosition, + ) -> Option> { + let src = hir::ModuleSource::from_position(db, position); + let module = hir::Module::from_definition( + db, + hir::Source { file_id: position.file_id.into(), value: src }, + ); + let token = + original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?; + let analyzer = hir::SourceAnalyzer::new( + db, + hir::Source::new(position.file_id.into(), &token.parent()), + Some(position.offset), + ); + let mut ctx = CompletionContext { + db, + analyzer, + token, + offset: position.offset, + module, + function_syntax: None, + use_item_syntax: None, + record_lit_syntax: None, + record_lit_pat: None, + is_param: false, + is_pat_binding: false, + is_trivial_path: false, + path_prefix: None, + after_if: false, + can_be_stmt: false, + is_new_item: false, + dot_receiver: None, + is_call: false, + is_path_type: false, + has_type_args: false, + dot_receiver_is_ambiguous_float_literal: false, + }; + ctx.fill(&original_parse, position.offset); + Some(ctx) + } + + // The range of the identifier that is being completed. + pub(crate) fn source_range(&self) -> TextRange { + match self.token.kind() { + // workaroud when completion is triggered by trigger characters. + IDENT => self.token.text_range(), + _ => TextRange::offset_len(self.offset, 0.into()), + } + } + + fn fill(&mut self, original_parse: &'a Parse, offset: TextUnit) { + // Insert a fake ident to get a valid parse tree. 
We will use this file + // to determine context, though the original_file will be used for + // actual completion. + let file = { + let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); + original_parse.reparse(&edit).tree() + }; + + // First, let's try to complete a reference to some declaration. + if let Some(name_ref) = find_node_at_offset::(file.syntax(), offset) { + // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. + // See RFC#1685. + if is_node::(name_ref.syntax()) { + self.is_param = true; + return; + } + self.classify_name_ref(original_parse.tree(), name_ref); + } + + // Otherwise, see if this is a declaration. We can use heuristics to + // suggest declaration names, see `CompletionKind::Magic`. + if let Some(name) = find_node_at_offset::(file.syntax(), offset) { + if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { + let parent = bind_pat.syntax().parent(); + if parent.clone().and_then(ast::MatchArm::cast).is_some() + || parent.and_then(ast::Condition::cast).is_some() + { + self.is_pat_binding = true; + } + } + if is_node::(name.syntax()) { + self.is_param = true; + return; + } + if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { + self.record_lit_pat = + find_node_at_offset(original_parse.tree().syntax(), self.offset); + } + } + } + + fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) { + let name_range = name_ref.syntax().text_range(); + if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() { + self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); + } + + let top_node = name_ref + .syntax() + .ancestors() + .take_while(|it| it.text_range() == name_range) + .last() + .unwrap(); + + match top_node.parent().map(|it| it.kind()) { + Some(SOURCE_FILE) | Some(ITEM_LIST) => { + self.is_new_item = true; + return; + } + _ => (), + } + + self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast); + + self.function_syntax = self + .token + .parent() + .ancestors() + .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) + .find_map(ast::FnDef::cast); + + let parent = match name_ref.syntax().parent() { + Some(it) => it, + None => return, + }; + + if let Some(segment) = ast::PathSegment::cast(parent.clone()) { + let path = segment.parent_path(); + self.is_call = path + .syntax() + .parent() + .and_then(ast::PathExpr::cast) + .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) + .is_some(); + + self.is_path_type = path.syntax().parent().and_then(ast::PathType::cast).is_some(); + self.has_type_args = segment.type_arg_list().is_some(); + + if let Some(mut path) = hir::Path::from_ast(path.clone()) { + if !path.is_ident() { + path.segments.pop().unwrap(); + self.path_prefix = Some(path); + return; + } + } + + if path.qualifier().is_none() { + self.is_trivial_path = true; + + // Find either enclosing expr statement (thing with `;`) or a + // block. If block, check that we are the last expr. 
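+            // Roughly (an illustrative sketch of the intent): in
+            // `fn f() { foo<|> }` the path is the tail expression of the
+            // block, so `can_be_stmt` becomes true; in `let x = foo<|>;`
+            // the path is nested inside another statement, so it stays false.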
+ self.can_be_stmt = name_ref + .syntax() + .ancestors() + .find_map(|node| { + if let Some(stmt) = ast::ExprStmt::cast(node.clone()) { + return Some( + stmt.syntax().text_range() == name_ref.syntax().text_range(), + ); + } + if let Some(block) = ast::Block::cast(node) { + return Some( + block.expr().map(|e| e.syntax().text_range()) + == Some(name_ref.syntax().text_range()), + ); + } + None + }) + .unwrap_or(false); + + if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { + if let Some(if_expr) = + find_node_at_offset::(original_file.syntax(), off) + { + if if_expr.syntax().text_range().end() + < name_ref.syntax().text_range().start() + { + self.after_if = true; + } + } + } + } + } + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { + // The receiver comes before the point of insertion of the fake + // ident, so it should have the same range in the non-modified file + self.dot_receiver = field_expr + .expr() + .map(|e| e.syntax().text_range()) + .and_then(|r| find_node_with_range(original_file.syntax(), r)); + self.dot_receiver_is_ambiguous_float_literal = if let Some(ast::Expr::Literal(l)) = + &self.dot_receiver + { + match l.kind() { + ast::LiteralKind::FloatNumber { suffix: _ } => l.token().text().ends_with('.'), + _ => false, + } + } else { + false + } + } + if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { + // As above + self.dot_receiver = method_call_expr + .expr() + .map(|e| e.syntax().text_range()) + .and_then(|r| find_node_with_range(original_file.syntax(), r)); + self.is_call = true; + } + } +} + +fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option { + find_covering_element(syntax, range).ancestors().find_map(N::cast) +} + +fn is_node(node: &SyntaxNode) -> bool { + match node.ancestors().find_map(N::cast) { + None => false, + Some(n) => n.syntax().text_range() == node.text_range(), + } +} diff --git a/crates/ra_ide/src/completion/completion_item.rs b/crates/ra_ide/src/completion/completion_item.rs new file mode 100644 index 000000000..93f336370 --- /dev/null +++ b/crates/ra_ide/src/completion/completion_item.rs @@ -0,0 +1,322 @@ +//! FIXME: write short doc here + +use std::fmt; + +use hir::Documentation; +use ra_syntax::TextRange; +use ra_text_edit::TextEdit; + +/// `CompletionItem` describes a single completion variant in the editor pop-up. +/// It is basically a POD with various properties. To construct a +/// `CompletionItem`, use `new` method and the `Builder` struct. +pub struct CompletionItem { + /// Used only internally in tests, to check only specific kind of + /// completion (postfix, keyword, reference, etc). + #[allow(unused)] + completion_kind: CompletionKind, + /// Label in the completion pop up which identifies completion. + label: String, + /// Range of identifier that is being completed. + /// + /// It should be used primarily for UI, but we also use this to convert + /// genetic TextEdit into LSP's completion edit (see conv.rs). + /// + /// `source_range` must contain the completion offset. `insert_text` should + /// start with what `source_range` points to, or VSCode will filter out the + /// completion silently. + source_range: TextRange, + /// What happens when user selects this item. + /// + /// Typically, replaces `source_range` with new identifier. + text_edit: TextEdit, + insert_text_format: InsertTextFormat, + + /// What item (struct, function, etc) are we completing. 
+ kind: Option, + + /// Lookup is used to check if completion item indeed can complete current + /// ident. + /// + /// That is, in `foo.bar<|>` lookup of `abracadabra` will be accepted (it + /// contains `bar` sub sequence), and `quux` will rejected. + lookup: Option, + + /// Additional info to show in the UI pop up. + detail: Option, + documentation: Option, + + /// Whether this item is marked as deprecated + deprecated: bool, +} + +// We use custom debug for CompletionItem to make `insta`'s diffs more readable. +impl fmt::Debug for CompletionItem { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut s = f.debug_struct("CompletionItem"); + s.field("label", &self.label()).field("source_range", &self.source_range()); + if self.text_edit().as_atoms().len() == 1 { + let atom = &self.text_edit().as_atoms()[0]; + s.field("delete", &atom.delete); + s.field("insert", &atom.insert); + } else { + s.field("text_edit", &self.text_edit); + } + if let Some(kind) = self.kind().as_ref() { + s.field("kind", kind); + } + if self.lookup() != self.label() { + s.field("lookup", &self.lookup()); + } + if let Some(detail) = self.detail() { + s.field("detail", &detail); + } + if let Some(documentation) = self.documentation() { + s.field("documentation", &documentation); + } + if self.deprecated { + s.field("deprecated", &true); + } + s.finish() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CompletionItemKind { + Snippet, + Keyword, + Module, + Function, + BuiltinType, + Struct, + Enum, + EnumVariant, + Binding, + Field, + Static, + Const, + Trait, + TypeAlias, + Method, + TypeParam, + Macro, +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub(crate) enum CompletionKind { + /// Parser-based keyword completion. + Keyword, + /// Your usual "complete all valid identifiers". + Reference, + /// "Secret sauce" completions. + Magic, + Snippet, + Postfix, + BuiltinType, +} + +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum InsertTextFormat { + PlainText, + Snippet, +} + +impl CompletionItem { + pub(crate) fn new( + completion_kind: CompletionKind, + source_range: TextRange, + label: impl Into, + ) -> Builder { + let label = label.into(); + Builder { + source_range, + completion_kind, + label, + insert_text: None, + insert_text_format: InsertTextFormat::PlainText, + detail: None, + documentation: None, + lookup: None, + kind: None, + text_edit: None, + deprecated: None, + } + } + /// What user sees in pop-up in the UI. + pub fn label(&self) -> &str { + &self.label + } + pub fn source_range(&self) -> TextRange { + self.source_range + } + + pub fn insert_text_format(&self) -> InsertTextFormat { + self.insert_text_format + } + + pub fn text_edit(&self) -> &TextEdit { + &self.text_edit + } + + /// Short one-line additional information, like a type + pub fn detail(&self) -> Option<&str> { + self.detail.as_ref().map(|it| it.as_str()) + } + /// A doc-comment + pub fn documentation(&self) -> Option { + self.documentation.clone() + } + /// What string is used for filtering. + pub fn lookup(&self) -> &str { + self.lookup.as_ref().map(|it| it.as_str()).unwrap_or_else(|| self.label()) + } + + pub fn kind(&self) -> Option { + self.kind + } + + pub fn deprecated(&self) -> bool { + self.deprecated + } +} + +/// A helper to make `CompletionItem`s. 
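+ // If no explicit `text_edit` is supplied, `Builder::build` falls back to
+ // `TextEdit::replace(source_range, insert_text)`, and if `insert_text` is
+ // also missing it simply re-inserts the label (see `build` below).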
+#[must_use] +pub(crate) struct Builder { + source_range: TextRange, + completion_kind: CompletionKind, + label: String, + insert_text: Option, + insert_text_format: InsertTextFormat, + detail: Option, + documentation: Option, + lookup: Option, + kind: Option, + text_edit: Option, + deprecated: Option, +} + +impl Builder { + pub(crate) fn add_to(self, acc: &mut Completions) { + acc.add(self.build()) + } + + pub(crate) fn build(self) -> CompletionItem { + let label = self.label; + let text_edit = match self.text_edit { + Some(it) => it, + None => TextEdit::replace( + self.source_range, + self.insert_text.unwrap_or_else(|| label.clone()), + ), + }; + + CompletionItem { + source_range: self.source_range, + label, + insert_text_format: self.insert_text_format, + text_edit, + detail: self.detail, + documentation: self.documentation, + lookup: self.lookup, + kind: self.kind, + completion_kind: self.completion_kind, + deprecated: self.deprecated.unwrap_or(false), + } + } + pub(crate) fn lookup_by(mut self, lookup: impl Into) -> Builder { + self.lookup = Some(lookup.into()); + self + } + pub(crate) fn label(mut self, label: impl Into) -> Builder { + self.label = label.into(); + self + } + pub(crate) fn insert_text(mut self, insert_text: impl Into) -> Builder { + self.insert_text = Some(insert_text.into()); + self + } + pub(crate) fn insert_snippet(mut self, snippet: impl Into) -> Builder { + self.insert_text_format = InsertTextFormat::Snippet; + self.insert_text(snippet) + } + pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { + self.kind = Some(kind); + self + } + pub(crate) fn text_edit(mut self, edit: TextEdit) -> Builder { + self.text_edit = Some(edit); + self + } + pub(crate) fn snippet_edit(mut self, edit: TextEdit) -> Builder { + self.insert_text_format = InsertTextFormat::Snippet; + self.text_edit(edit) + } + #[allow(unused)] + pub(crate) fn detail(self, detail: impl Into) -> Builder { + self.set_detail(Some(detail)) + } + pub(crate) fn set_detail(mut self, detail: Option>) -> Builder { + self.detail = detail.map(Into::into); + self + } + #[allow(unused)] + pub(crate) fn documentation(self, docs: Documentation) -> Builder { + self.set_documentation(Some(docs)) + } + pub(crate) fn set_documentation(mut self, docs: Option) -> Builder { + self.documentation = docs.map(Into::into); + self + } + pub(crate) fn set_deprecated(mut self, deprecated: bool) -> Builder { + self.deprecated = Some(deprecated); + self + } +} + +impl<'a> Into for Builder { + fn into(self) -> CompletionItem { + self.build() + } +} + +/// Represents an in-progress set of completions being built. 
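+ // Completion handlers push items into this accumulator, typically via the
+ // builder; illustrative use, following the pattern in `presentation.rs`:
+ //
+ //     CompletionItem::new(CompletionKind::Reference, ctx.source_range(), "frobnicate")
+ //         .kind(CompletionItemKind::Function)
+ //         .detail("fn frobnicate()")
+ //         .add_to(acc);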
+#[derive(Debug, Default)] +pub(crate) struct Completions { + buf: Vec, +} + +impl Completions { + pub(crate) fn add(&mut self, item: impl Into) { + self.buf.push(item.into()) + } + pub(crate) fn add_all(&mut self, items: I) + where + I: IntoIterator, + I::Item: Into, + { + items.into_iter().for_each(|item| self.add(item.into())) + } +} + +impl Into> for Completions { + fn into(self) -> Vec { + self.buf + } +} + +#[cfg(test)] +pub(crate) fn do_completion(code: &str, kind: CompletionKind) -> Vec { + use crate::completion::completions; + use crate::mock_analysis::{analysis_and_position, single_file_with_position}; + let (analysis, position) = if code.contains("//-") { + analysis_and_position(code) + } else { + single_file_with_position(code) + }; + let completions = completions(&analysis.db, position).unwrap(); + let completion_items: Vec = completions.into(); + let mut kind_completions: Vec = + completion_items.into_iter().filter(|c| c.completion_kind == kind).collect(); + kind_completions.sort_by_key(|c| c.label.clone()); + kind_completions +} diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs new file mode 100644 index 000000000..5f056730a --- /dev/null +++ b/crates/ra_ide/src/completion/presentation.rs @@ -0,0 +1,676 @@ +//! This modules takes care of rendering various definitions as completion items. + +use hir::{db::HirDatabase, Docs, HasAttrs, HasSource, HirDisplay, ScopeDef, Type}; +use join_to_string::join; +use ra_syntax::ast::NameOwner; +use test_utils::tested_by; + +use crate::completion::{ + CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions, +}; + +use crate::display::{const_label, function_label, macro_label, type_label}; + +impl Completions { + pub(crate) fn add_field( + &mut self, + ctx: &CompletionContext, + field: hir::StructField, + ty: &Type, + ) { + let is_deprecated = is_deprecated(field, ctx.db); + CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + field.name(ctx.db).to_string(), + ) + .kind(CompletionItemKind::Field) + .detail(ty.display(ctx.db).to_string()) + .set_documentation(field.docs(ctx.db)) + .set_deprecated(is_deprecated) + .add_to(self); + } + + pub(crate) fn add_tuple_field(&mut self, ctx: &CompletionContext, field: usize, ty: &Type) { + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), field.to_string()) + .kind(CompletionItemKind::Field) + .detail(ty.display(ctx.db).to_string()) + .add_to(self); + } + + pub(crate) fn add_resolution( + &mut self, + ctx: &CompletionContext, + local_name: String, + resolution: &ScopeDef, + ) { + use hir::ModuleDef::*; + + let completion_kind = match resolution { + ScopeDef::ModuleDef(BuiltinType(..)) => CompletionKind::BuiltinType, + _ => CompletionKind::Reference, + }; + + let kind = match resolution { + ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::Module, + ScopeDef::ModuleDef(Function(func)) => { + return self.add_function_with_name(ctx, Some(local_name), *func); + } + ScopeDef::ModuleDef(Adt(hir::Adt::Struct(_))) => CompletionItemKind::Struct, + // FIXME: add CompletionItemKind::Union + ScopeDef::ModuleDef(Adt(hir::Adt::Union(_))) => CompletionItemKind::Struct, + ScopeDef::ModuleDef(Adt(hir::Adt::Enum(_))) => CompletionItemKind::Enum, + + ScopeDef::ModuleDef(EnumVariant(..)) => CompletionItemKind::EnumVariant, + ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::Const, + ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::Static, + ScopeDef::ModuleDef(Trait(..)) => 
CompletionItemKind::Trait, + ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::TypeAlias, + ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType, + ScopeDef::GenericParam(..) => CompletionItemKind::TypeParam, + ScopeDef::Local(..) => CompletionItemKind::Binding, + // (does this need its own kind?) + ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => CompletionItemKind::TypeParam, + ScopeDef::MacroDef(mac) => { + return self.add_macro(ctx, Some(local_name), *mac); + } + ScopeDef::Unknown => { + return self.add(CompletionItem::new( + CompletionKind::Reference, + ctx.source_range(), + local_name, + )); + } + }; + + let docs = match resolution { + ScopeDef::ModuleDef(Module(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Adt(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(EnumVariant(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Const(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Static(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(Trait(it)) => it.docs(ctx.db), + ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(ctx.db), + _ => None, + }; + + let mut completion_item = + CompletionItem::new(completion_kind, ctx.source_range(), local_name.clone()); + if let ScopeDef::Local(local) = resolution { + let ty = local.ty(ctx.db); + if !ty.is_unknown() { + completion_item = completion_item.detail(ty.display(ctx.db).to_string()); + } + }; + + // If not an import, add parenthesis automatically. + if ctx.is_path_type + && !ctx.has_type_args + && ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis") + { + let has_non_default_type_params = match resolution { + ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db), + ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db), + _ => false, + }; + if has_non_default_type_params { + tested_by!(inserts_angle_brackets_for_generics); + completion_item = completion_item + .lookup_by(local_name.clone()) + .label(format!("{}<…>", local_name)) + .insert_snippet(format!("{}<$0>", local_name)); + } + } + + completion_item.kind(kind).set_documentation(docs).add_to(self) + } + + pub(crate) fn add_function(&mut self, ctx: &CompletionContext, func: hir::Function) { + self.add_function_with_name(ctx, None, func) + } + + fn guess_macro_braces(&self, macro_name: &str, docs: &str) -> &'static str { + let mut votes = [0, 0, 0]; + for (idx, s) in docs.match_indices(¯o_name) { + let (before, after) = (&docs[..idx], &docs[idx + s.len()..]); + // Ensure to match the full word + if after.starts_with('!') + && before + .chars() + .rev() + .next() + .map_or(true, |c| c != '_' && !c.is_ascii_alphanumeric()) + { + // It may have spaces before the braces like `foo! {}` + match after[1..].chars().find(|&c| !c.is_whitespace()) { + Some('{') => votes[0] += 1, + Some('[') => votes[1] += 1, + Some('(') => votes[2] += 1, + _ => {} + } + } + } + + // Insert a space before `{}`. + // We prefer the last one when some votes equal. 
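+ // (`max_by_key` returns the *last* maximal element, so when the docs
+ // contain no examples and all votes are zero, `($0)` wins.)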
+ *votes.iter().zip(&[" {$0}", "[$0]", "($0)"]).max_by_key(|&(&vote, _)| vote).unwrap().1 + } + + pub(crate) fn add_macro( + &mut self, + ctx: &CompletionContext, + name: Option, + macro_: hir::MacroDef, + ) { + let name = match name { + Some(it) => it, + None => return, + }; + + let ast_node = macro_.source(ctx.db).value; + let detail = macro_label(&ast_node); + + let docs = macro_.docs(ctx.db); + let macro_declaration = format!("{}!", name); + + let mut builder = + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), ¯o_declaration) + .kind(CompletionItemKind::Macro) + .set_documentation(docs.clone()) + .set_deprecated(is_deprecated(macro_, ctx.db)) + .detail(detail); + + builder = if ctx.use_item_syntax.is_some() { + builder.insert_text(name) + } else { + let macro_braces_to_insert = + self.guess_macro_braces(&name, docs.as_ref().map_or("", |s| s.as_str())); + builder.insert_snippet(macro_declaration + macro_braces_to_insert) + }; + + self.add(builder); + } + + fn add_function_with_name( + &mut self, + ctx: &CompletionContext, + name: Option, + func: hir::Function, + ) { + let func_name = func.name(ctx.db); + let has_self_param = func.has_self_param(ctx.db); + let params = func.params(ctx.db); + + let name = name.unwrap_or_else(|| func_name.to_string()); + let ast_node = func.source(ctx.db).value; + let detail = function_label(&ast_node); + + let mut builder = + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) + .kind(if has_self_param { + CompletionItemKind::Method + } else { + CompletionItemKind::Function + }) + .set_documentation(func.docs(ctx.db)) + .set_deprecated(is_deprecated(func, ctx.db)) + .detail(detail); + + // Add `<>` for generic types + if ctx.use_item_syntax.is_none() + && !ctx.is_call + && ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis") + { + tested_by!(inserts_parens_for_function_calls); + let (snippet, label) = if params.is_empty() || has_self_param && params.len() == 1 { + (format!("{}()$0", func_name), format!("{}()", name)) + } else { + (format!("{}($0)", func_name), format!("{}(…)", name)) + }; + builder = builder.lookup_by(name).label(label).insert_snippet(snippet); + } + + self.add(builder) + } + + pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) { + let ast_node = constant.source(ctx.db).value; + let name = match ast_node.name() { + Some(name) => name, + _ => return, + }; + let detail = const_label(&ast_node); + + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) + .kind(CompletionItemKind::Const) + .set_documentation(constant.docs(ctx.db)) + .set_deprecated(is_deprecated(constant, ctx.db)) + .detail(detail) + .add_to(self); + } + + pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) { + let type_def = type_alias.source(ctx.db).value; + let name = match type_def.name() { + Some(name) => name, + _ => return, + }; + let detail = type_label(&type_def); + + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.text().to_string()) + .kind(CompletionItemKind::TypeAlias) + .set_documentation(type_alias.docs(ctx.db)) + .set_deprecated(is_deprecated(type_alias, ctx.db)) + .detail(detail) + .add_to(self); + } + + pub(crate) fn add_enum_variant(&mut self, ctx: &CompletionContext, variant: hir::EnumVariant) { + let is_deprecated = is_deprecated(variant, ctx.db); + let name = match variant.name(ctx.db) { + Some(it) => it, + None => return, + }; + let detail_types = 
variant.fields(ctx.db).into_iter().map(|field| field.ty(ctx.db)); + let detail = join(detail_types.map(|t| t.display(ctx.db).to_string())) + .separator(", ") + .surround_with("(", ")") + .to_string(); + CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.to_string()) + .kind(CompletionItemKind::EnumVariant) + .set_documentation(variant.docs(ctx.db)) + .set_deprecated(is_deprecated) + .detail(detail) + .add_to(self); + } +} + +fn is_deprecated(node: impl HasAttrs, db: &impl HirDatabase) -> bool { + node.attrs(db).by_key("deprecated").exists() +} + +#[cfg(test)] +mod tests { + use insta::assert_debug_snapshot; + use test_utils::covers; + + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + + fn do_reference_completion(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn sets_deprecated_flag_in_completion_items() { + assert_debug_snapshot!( + do_reference_completion( + r#" + #[deprecated] + fn something_deprecated() {} + + #[deprecated(since = "1.0.0")] + fn something_else_deprecated() {} + + fn main() { som<|> } + "#, + ), + @r###" + [ + CompletionItem { + label: "main()", + source_range: [203; 206), + delete: [203; 206), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + CompletionItem { + label: "something_deprecated()", + source_range: [203; 206), + delete: [203; 206), + insert: "something_deprecated()$0", + kind: Function, + lookup: "something_deprecated", + detail: "fn something_deprecated()", + deprecated: true, + }, + CompletionItem { + label: "something_else_deprecated()", + source_range: [203; 206), + delete: [203; 206), + insert: "something_else_deprecated()$0", + kind: Function, + lookup: "something_else_deprecated", + detail: "fn something_else_deprecated()", + deprecated: true, + }, + ] + "### + ); + } + + #[test] + fn inserts_parens_for_function_calls() { + covers!(inserts_parens_for_function_calls); + assert_debug_snapshot!( + do_reference_completion( + r" + fn no_args() {} + fn main() { no_<|> } + " + ), + @r###" + [ + CompletionItem { + label: "main()", + source_range: [61; 64), + delete: [61; 64), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + CompletionItem { + label: "no_args()", + source_range: [61; 64), + delete: [61; 64), + insert: "no_args()$0", + kind: Function, + lookup: "no_args", + detail: "fn no_args()", + }, + ] + "### + ); + assert_debug_snapshot!( + do_reference_completion( + r" + fn with_args(x: i32, y: String) {} + fn main() { with_<|> } + " + ), + @r###" + [ + CompletionItem { + label: "main()", + source_range: [80; 85), + delete: [80; 85), + insert: "main()$0", + kind: Function, + lookup: "main", + detail: "fn main()", + }, + CompletionItem { + label: "with_args(…)", + source_range: [80; 85), + delete: [80; 85), + insert: "with_args($0)", + kind: Function, + lookup: "with_args", + detail: "fn with_args(x: i32, y: String)", + }, + ] + "### + ); + assert_debug_snapshot!( + do_reference_completion( + r" + struct S {} + impl S { + fn foo(&self) {} + } + fn bar(s: &S) { + s.f<|> + } + " + ), + @r###" + [ + CompletionItem { + label: "foo()", + source_range: [163; 164), + delete: [163; 164), + insert: "foo()$0", + kind: Method, + lookup: "foo", + detail: "fn foo(&self)", + }, + ] + "### + ); + } + + #[test] + fn dont_render_function_parens_in_use_item() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + mod m { pub fn foo() {} } + use crate::m::f<|>; + " + ), + @r###" + [ + 
CompletionItem { + label: "foo", + source_range: [40; 41), + delete: [40; 41), + insert: "foo", + kind: Function, + detail: "pub fn foo()", + }, + ] + "### + ); + } + + #[test] + fn dont_render_function_parens_if_already_call() { + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + fn frobnicate() {} + fn main() { + frob<|>(); + } + " + ), + @r###" + [ + CompletionItem { + label: "frobnicate", + source_range: [35; 39), + delete: [35; 39), + insert: "frobnicate", + kind: Function, + detail: "fn frobnicate()", + }, + CompletionItem { + label: "main", + source_range: [35; 39), + delete: [35; 39), + insert: "main", + kind: Function, + detail: "fn main()", + }, + ] + "### + ); + assert_debug_snapshot!( + do_reference_completion( + " + //- /lib.rs + struct Foo {} + impl Foo { fn new() -> Foo {} } + fn main() { + Foo::ne<|>(); + } + " + ), + @r###" + [ + CompletionItem { + label: "new", + source_range: [67; 69), + delete: [67; 69), + insert: "new", + kind: Function, + detail: "fn new() -> Foo", + }, + ] + "### + ); + } + + #[test] + fn inserts_angle_brackets_for_generics() { + covers!(inserts_angle_brackets_for_generics); + assert_debug_snapshot!( + do_reference_completion( + r" + struct Vec {} + fn foo(xs: Ve<|>) + " + ), + @r###" + [ + CompletionItem { + label: "Vec<…>", + source_range: [61; 63), + delete: [61; 63), + insert: "Vec<$0>", + kind: Struct, + lookup: "Vec", + }, + CompletionItem { + label: "foo(…)", + source_range: [61; 63), + delete: [61; 63), + insert: "foo($0)", + kind: Function, + lookup: "foo", + detail: "fn foo(xs: Ve)", + }, + ] + "### + ); + assert_debug_snapshot!( + do_reference_completion( + r" + type Vec = (T,); + fn foo(xs: Ve<|>) + " + ), + @r###" + [ + CompletionItem { + label: "Vec<…>", + source_range: [64; 66), + delete: [64; 66), + insert: "Vec<$0>", + kind: TypeAlias, + lookup: "Vec", + }, + CompletionItem { + label: "foo(…)", + source_range: [64; 66), + delete: [64; 66), + insert: "foo($0)", + kind: Function, + lookup: "foo", + detail: "fn foo(xs: Ve)", + }, + ] + "### + ); + assert_debug_snapshot!( + do_reference_completion( + r" + struct Vec {} + fn foo(xs: Ve<|>) + " + ), + @r###" + [ + CompletionItem { + label: "Vec", + source_range: [68; 70), + delete: [68; 70), + insert: "Vec", + kind: Struct, + }, + CompletionItem { + label: "foo(…)", + source_range: [68; 70), + delete: [68; 70), + insert: "foo($0)", + kind: Function, + lookup: "foo", + detail: "fn foo(xs: Ve)", + }, + ] + "### + ); + assert_debug_snapshot!( + do_reference_completion( + r" + struct Vec {} + fn foo(xs: Ve<|>) + " + ), + @r###" + [ + CompletionItem { + label: "Vec", + source_range: [61; 63), + delete: [61; 63), + insert: "Vec", + kind: Struct, + }, + CompletionItem { + label: "foo(…)", + source_range: [61; 63), + delete: [61; 63), + insert: "foo($0)", + kind: Function, + lookup: "foo", + detail: "fn foo(xs: Ve)", + }, + ] + "### + ); + } + + #[test] + fn dont_insert_macro_call_braces_in_use() { + assert_debug_snapshot!( + do_reference_completion( + r" + //- /main.rs + use foo::<|>; + + //- /foo/lib.rs + #[macro_export] + macro_rules frobnicate { + () => () + } + " + ), + @r###" + [ + CompletionItem { + label: "frobnicate!", + source_range: [9; 9), + delete: [9; 9), + insert: "frobnicate", + kind: Macro, + detail: "#[macro_export]\nmacro_rules! frobnicate", + }, + ] + "### + ) + } +} diff --git a/crates/ra_ide/src/db.rs b/crates/ra_ide/src/db.rs new file mode 100644 index 000000000..f739ebecd --- /dev/null +++ b/crates/ra_ide/src/db.rs @@ -0,0 +1,144 @@ +//! 
FIXME: write short doc here + +use std::sync::Arc; + +use ra_db::{ + salsa::{self, Database, Durability}, + Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, + SourceDatabase, SourceDatabaseExt, SourceRootId, +}; +use rustc_hash::FxHashMap; + +use crate::{ + symbol_index::{self, SymbolsDatabase}, + FeatureFlags, LineIndex, +}; + +#[salsa::database( + ra_db::SourceDatabaseStorage, + ra_db::SourceDatabaseExtStorage, + LineIndexDatabaseStorage, + symbol_index::SymbolsDatabaseStorage, + hir::db::InternDatabaseStorage, + hir::db::AstDatabaseStorage, + hir::db::DefDatabaseStorage, + hir::db::HirDatabaseStorage +)] +#[derive(Debug)] +pub(crate) struct RootDatabase { + runtime: salsa::Runtime, + pub(crate) feature_flags: Arc, + pub(crate) debug_data: Arc, + pub(crate) last_gc: crate::wasm_shims::Instant, + pub(crate) last_gc_check: crate::wasm_shims::Instant, +} + +impl FileLoader for RootDatabase { + fn file_text(&self, file_id: FileId) -> Arc { + FileLoaderDelegate(self).file_text(file_id) + } + fn resolve_relative_path( + &self, + anchor: FileId, + relative_path: &RelativePath, + ) -> Option { + FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path) + } + fn relevant_crates(&self, file_id: FileId) -> Arc> { + FileLoaderDelegate(self).relevant_crates(file_id) + } +} + +impl hir::debug::HirDebugHelper for RootDatabase { + fn crate_name(&self, krate: CrateId) -> Option { + self.debug_data.crate_names.get(&krate).cloned() + } + fn file_path(&self, file_id: FileId) -> Option { + let source_root_id = self.file_source_root(file_id); + let source_root_path = self.debug_data.root_paths.get(&source_root_id)?; + let file_path = self.file_relative_path(file_id); + Some(format!("{}/{}", source_root_path, file_path)) + } +} + +impl salsa::Database for RootDatabase { + fn salsa_runtime(&self) -> &salsa::Runtime { + &self.runtime + } + fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime { + &mut self.runtime + } + fn on_propagated_panic(&self) -> ! { + Canceled::throw() + } + fn salsa_event(&self, event: impl Fn() -> salsa::Event) { + match event().kind { + salsa::EventKind::DidValidateMemoizedValue { .. } + | salsa::EventKind::WillExecute { .. 
} => { + self.check_canceled(); + } + _ => (), + } + } +} + +impl Default for RootDatabase { + fn default() -> RootDatabase { + RootDatabase::new(None, FeatureFlags::default()) + } +} + +impl RootDatabase { + pub fn new(lru_capacity: Option, feature_flags: FeatureFlags) -> RootDatabase { + let mut db = RootDatabase { + runtime: salsa::Runtime::default(), + last_gc: crate::wasm_shims::Instant::now(), + last_gc_check: crate::wasm_shims::Instant::now(), + feature_flags: Arc::new(feature_flags), + debug_data: Default::default(), + }; + db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); + db.set_local_roots_with_durability(Default::default(), Durability::HIGH); + db.set_library_roots_with_durability(Default::default(), Durability::HIGH); + let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP); + db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_capacity); + db.query_mut(hir::db::ParseMacroQuery).set_lru_capacity(lru_capacity); + db.query_mut(hir::db::MacroExpandQuery).set_lru_capacity(lru_capacity); + db + } +} + +impl salsa::ParallelDatabase for RootDatabase { + fn snapshot(&self) -> salsa::Snapshot { + salsa::Snapshot::new(RootDatabase { + runtime: self.runtime.snapshot(self), + last_gc: self.last_gc, + last_gc_check: self.last_gc_check, + feature_flags: Arc::clone(&self.feature_flags), + debug_data: Arc::clone(&self.debug_data), + }) + } +} + +#[salsa::query_group(LineIndexDatabaseStorage)] +pub(crate) trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { + fn line_index(&self, file_id: FileId) -> Arc; +} + +fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc { + let text = db.file_text(file_id); + Arc::new(LineIndex::new(&*text)) +} + +#[derive(Debug, Default, Clone)] +pub(crate) struct DebugData { + pub(crate) root_paths: FxHashMap, + pub(crate) crate_names: FxHashMap, +} + +impl DebugData { + pub(crate) fn merge(&mut self, other: DebugData) { + self.root_paths.extend(other.root_paths.into_iter()); + self.crate_names.extend(other.crate_names.into_iter()); + } +} diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs new file mode 100644 index 000000000..cc1ccab4b --- /dev/null +++ b/crates/ra_ide/src/diagnostics.rs @@ -0,0 +1,652 @@ +//! 
FIXME: write short doc here + +use std::cell::RefCell; + +use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; +use itertools::Itertools; +use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; +use ra_prof::profile; +use ra_syntax::{ + algo, + ast::{self, make, AstNode}, + Location, SyntaxNode, TextRange, T, +}; +use ra_text_edit::{TextEdit, TextEditBuilder}; + +use crate::{db::RootDatabase, Diagnostic, FileId, FileSystemEdit, SourceChange, SourceFileEdit}; + +#[derive(Debug, Copy, Clone)] +pub enum Severity { + Error, + WeakWarning, +} + +pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec { + let _p = profile("diagnostics"); + let parse = db.parse(file_id); + let mut res = Vec::new(); + + res.extend(parse.errors().iter().map(|err| Diagnostic { + range: location_to_range(err.location()), + message: format!("Syntax Error: {}", err), + severity: Severity::Error, + fix: None, + })); + + for node in parse.tree().syntax().descendants() { + check_unnecessary_braces_in_use_statement(&mut res, file_id, &node); + check_struct_shorthand_initialization(&mut res, file_id, &node); + } + let res = RefCell::new(res); + let mut sink = DiagnosticSink::new(|d| { + res.borrow_mut().push(Diagnostic { + message: d.message(), + range: d.highlight_range(), + severity: Severity::Error, + fix: None, + }) + }) + .on::(|d| { + let original_file = d.source().file_id.original_file(db); + let source_root = db.file_source_root(original_file); + let path = db + .file_relative_path(original_file) + .parent() + .unwrap_or_else(|| RelativePath::new("")) + .join(&d.candidate); + let create_file = FileSystemEdit::CreateFile { source_root, path }; + let fix = SourceChange::file_system_edit("create module", create_file); + res.borrow_mut().push(Diagnostic { + range: d.highlight_range(), + message: d.message(), + severity: Severity::Error, + fix: Some(fix), + }) + }) + .on::(|d| { + let mut field_list = d.ast(db); + for f in d.missed_fields.iter() { + let field = make::record_field(make::name_ref(&f.to_string()), Some(make::expr_unit())); + field_list = field_list.append_field(&field); + } + + let mut builder = TextEditBuilder::default(); + algo::diff(&d.ast(db).syntax(), &field_list.syntax()).into_text_edit(&mut builder); + + let fix = + SourceChange::source_file_edit_from("fill struct fields", file_id, builder.finish()); + res.borrow_mut().push(Diagnostic { + range: d.highlight_range(), + message: d.message(), + severity: Severity::Error, + fix: Some(fix), + }) + }) + .on::(|d| { + let node = d.ast(db); + let replacement = format!("Ok({})", node.syntax()); + let edit = TextEdit::replace(node.syntax().text_range(), replacement); + let fix = SourceChange::source_file_edit_from("wrap with ok", file_id, edit); + res.borrow_mut().push(Diagnostic { + range: d.highlight_range(), + message: d.message(), + severity: Severity::Error, + fix: Some(fix), + }) + }); + let source_file = db.parse(file_id).tree(); + let src = + hir::Source { file_id: file_id.into(), value: hir::ModuleSource::SourceFile(source_file) }; + if let Some(m) = hir::Module::from_definition(db, src) { + m.diagnostics(db, &mut sink); + }; + drop(sink); + res.into_inner() +} +fn location_to_range(location: Location) -> TextRange { + match location { + Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), + Location::Range(range) => range, + } +} + +fn check_unnecessary_braces_in_use_statement( + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, +) -> Option<()> { + let use_tree_list = 
ast::UseTreeList::cast(node.clone())?; + if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { + let range = use_tree_list.syntax().text_range(); + let edit = + text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) + .unwrap_or_else(|| { + let to_replace = single_use_tree.syntax().text().to_string(); + let mut edit_builder = TextEditBuilder::default(); + edit_builder.delete(range); + edit_builder.insert(range.start(), to_replace); + edit_builder.finish() + }); + + acc.push(Diagnostic { + range, + message: "Unnecessary braces in use statement".to_string(), + severity: Severity::WeakWarning, + fix: Some(SourceChange::source_file_edit( + "Remove unnecessary braces", + SourceFileEdit { file_id, edit }, + )), + }); + } + + Some(()) +} + +fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( + single_use_tree: &ast::UseTree, +) -> Option { + let use_tree_list_node = single_use_tree.syntax().parent()?; + if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { + let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); + let end = use_tree_list_node.text_range().end(); + let range = TextRange::from_to(start, end); + return Some(TextEdit::delete(range)); + } + None +} + +fn check_struct_shorthand_initialization( + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, +) -> Option<()> { + let record_lit = ast::RecordLit::cast(node.clone())?; + let record_field_list = record_lit.record_field_list()?; + for record_field in record_field_list.fields() { + if let (Some(name_ref), Some(expr)) = (record_field.name_ref(), record_field.expr()) { + let field_name = name_ref.syntax().text().to_string(); + let field_expr = expr.syntax().text().to_string(); + if field_name == field_expr { + let mut edit_builder = TextEditBuilder::default(); + edit_builder.delete(record_field.syntax().text_range()); + edit_builder.insert(record_field.syntax().text_range().start(), field_name); + let edit = edit_builder.finish(); + + acc.push(Diagnostic { + range: record_field.syntax().text_range(), + message: "Shorthand struct initialization".to_string(), + severity: Severity::WeakWarning, + fix: Some(SourceChange::source_file_edit( + "use struct shorthand initialization", + SourceFileEdit { file_id, edit }, + )), + }); + } + } + } + Some(()) +} + +#[cfg(test)] +mod tests { + use insta::assert_debug_snapshot; + use join_to_string::join; + use ra_syntax::SourceFile; + use test_utils::assert_eq_text; + + use crate::mock_analysis::{analysis_and_position, single_file}; + + use super::*; + + type DiagnosticChecker = fn(&mut Vec, FileId, &SyntaxNode) -> Option<()>; + + fn check_not_applicable(code: &str, func: DiagnosticChecker) { + let parse = SourceFile::parse(code); + let mut diagnostics = Vec::new(); + for node in parse.tree().syntax().descendants() { + func(&mut diagnostics, FileId(0), &node); + } + assert!(diagnostics.is_empty()); + } + + fn check_apply(before: &str, after: &str, func: DiagnosticChecker) { + let parse = SourceFile::parse(before); + let mut diagnostics = Vec::new(); + for node in parse.tree().syntax().descendants() { + func(&mut diagnostics, FileId(0), &node); + } + let diagnostic = + diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_file_edits.pop().unwrap().edit; + let actual = edit.apply(&before); + assert_eq_text!(after, &actual); + } + + /// Takes a multi-file input fixture with 
annotated cursor positions, + /// and checks that: + /// * a diagnostic is produced + /// * this diagnostic touches the input cursor position + /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied + fn check_apply_diagnostic_fix_from_position(fixture: &str, after: &str) { + let (analysis, file_position) = analysis_and_position(fixture); + let diagnostic = analysis.diagnostics(file_position.file_id).unwrap().pop().unwrap(); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_file_edits.pop().unwrap().edit; + let target_file_contents = analysis.file_text(file_position.file_id).unwrap(); + let actual = edit.apply(&target_file_contents); + + // Strip indent and empty lines from `after`, to match the behaviour of + // `parse_fixture` called from `analysis_and_position`. + let margin = fixture + .lines() + .filter(|it| it.trim_start().starts_with("//-")) + .map(|it| it.len() - it.trim_start().len()) + .next() + .expect("empty fixture"); + let after = join(after.lines().filter_map(|line| { + if line.len() > margin { + Some(&line[margin..]) + } else { + None + } + })) + .separator("\n") + .suffix("\n") + .to_string(); + + assert_eq_text!(&after, &actual); + assert!( + diagnostic.range.start() <= file_position.offset + && diagnostic.range.end() >= file_position.offset, + "diagnostic range {} does not touch cursor position {}", + diagnostic.range, + file_position.offset + ); + } + + fn check_apply_diagnostic_fix(before: &str, after: &str) { + let (analysis, file_id) = single_file(before); + let diagnostic = analysis.diagnostics(file_id).unwrap().pop().unwrap(); + let mut fix = diagnostic.fix.unwrap(); + let edit = fix.source_file_edits.pop().unwrap().edit; + let actual = edit.apply(&before); + assert_eq_text!(after, &actual); + } + + /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics + /// apply to the file containing the cursor. 
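+ // In these fixtures `//- /path.rs` starts a new file and `<|>` marks the
+ // cursor position (see `mock_analysis::analysis_and_position`).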
+ fn check_no_diagnostic_for_target_file(fixture: &str) { + let (analysis, file_position) = analysis_and_position(fixture); + let diagnostics = analysis.diagnostics(file_position.file_id).unwrap(); + assert_eq!(diagnostics.len(), 0); + } + + fn check_no_diagnostic(content: &str) { + let (analysis, file_id) = single_file(content); + let diagnostics = analysis.diagnostics(file_id).unwrap(); + assert_eq!(diagnostics.len(), 0); + } + + #[test] + fn test_wrap_return_type() { + let before = r#" + //- /main.rs + use std::{string::String, result::Result::{self, Ok, Err}}; + + fn div(x: i32, y: i32) -> Result { + if y == 0 { + return Err("div by zero".into()); + } + x / y<|> + } + + //- /std/lib.rs + pub mod string { + pub struct String { } + } + pub mod result { + pub enum Result { Ok(T), Err(E) } + } + "#; + let after = r#" + use std::{string::String, result::Result::{self, Ok, Err}}; + + fn div(x: i32, y: i32) -> Result { + if y == 0 { + return Err("div by zero".into()); + } + Ok(x / y) + } + "#; + check_apply_diagnostic_fix_from_position(before, after); + } + + #[test] + fn test_wrap_return_type_handles_generic_functions() { + let before = r#" + //- /main.rs + use std::result::Result::{self, Ok, Err}; + + fn div(x: T) -> Result { + if x == 0 { + return Err(7); + } + <|>x + } + + //- /std/lib.rs + pub mod result { + pub enum Result { Ok(T), Err(E) } + } + "#; + let after = r#" + use std::result::Result::{self, Ok, Err}; + + fn div(x: T) -> Result { + if x == 0 { + return Err(7); + } + Ok(x) + } + "#; + check_apply_diagnostic_fix_from_position(before, after); + } + + #[test] + fn test_wrap_return_type_handles_type_aliases() { + let before = r#" + //- /main.rs + use std::{string::String, result::Result::{self, Ok, Err}}; + + type MyResult = Result; + + fn div(x: i32, y: i32) -> MyResult { + if y == 0 { + return Err("div by zero".into()); + } + x <|>/ y + } + + //- /std/lib.rs + pub mod string { + pub struct String { } + } + pub mod result { + pub enum Result { Ok(T), Err(E) } + } + "#; + let after = r#" + use std::{string::String, result::Result::{self, Ok, Err}}; + + type MyResult = Result; + fn div(x: i32, y: i32) -> MyResult { + if y == 0 { + return Err("div by zero".into()); + } + Ok(x / y) + } + "#; + check_apply_diagnostic_fix_from_position(before, after); + } + + #[test] + fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() { + let content = r#" + //- /main.rs + use std::{string::String, result::Result::{self, Ok, Err}}; + + fn foo() -> Result { + 0<|> + } + + //- /std/lib.rs + pub mod string { + pub struct String { } + } + pub mod result { + pub enum Result { Ok(T), Err(E) } + } + "#; + check_no_diagnostic_for_target_file(content); + } + + #[test] + fn test_wrap_return_type_not_applicable_when_return_type_is_not_result() { + let content = r#" + //- /main.rs + use std::{string::String, result::Result::{self, Ok, Err}}; + + enum SomeOtherEnum { + Ok(i32), + Err(String), + } + + fn foo() -> SomeOtherEnum { + 0<|> + } + + //- /std/lib.rs + pub mod string { + pub struct String { } + } + pub mod result { + pub enum Result { Ok(T), Err(E) } + } + "#; + check_no_diagnostic_for_target_file(content); + } + + #[test] + fn test_fill_struct_fields_empty() { + let before = r" + struct TestStruct { + one: i32, + two: i64, + } + + fn test_fn() { + let s = TestStruct{}; + } + "; + let after = r" + struct TestStruct { + one: i32, + two: i64, + } + + fn test_fn() { + let s = TestStruct{ one: (), two: ()}; + } + "; + check_apply_diagnostic_fix(before, after); + } + + #[test] + fn 
test_fill_struct_fields_partial() { + let before = r" + struct TestStruct { + one: i32, + two: i64, + } + + fn test_fn() { + let s = TestStruct{ two: 2 }; + } + "; + let after = r" + struct TestStruct { + one: i32, + two: i64, + } + + fn test_fn() { + let s = TestStruct{ two: 2, one: () }; + } + "; + check_apply_diagnostic_fix(before, after); + } + + #[test] + fn test_fill_struct_fields_no_diagnostic() { + let content = r" + struct TestStruct { + one: i32, + two: i64, + } + + fn test_fn() { + let one = 1; + let s = TestStruct{ one, two: 2 }; + } + "; + + check_no_diagnostic(content); + } + + #[test] + fn test_fill_struct_fields_no_diagnostic_on_spread() { + let content = r" + struct TestStruct { + one: i32, + two: i64, + } + + fn test_fn() { + let one = 1; + let s = TestStruct{ ..a }; + } + "; + + check_no_diagnostic(content); + } + + #[test] + fn test_unresolved_module_diagnostic() { + let (analysis, file_id) = single_file("mod foo;"); + let diagnostics = analysis.diagnostics(file_id).unwrap(); + assert_debug_snapshot!(diagnostics, @r###" + [ + Diagnostic { + message: "unresolved module", + range: [0; 8), + fix: Some( + SourceChange { + label: "create module", + source_file_edits: [], + file_system_edits: [ + CreateFile { + source_root: SourceRootId( + 0, + ), + path: "foo.rs", + }, + ], + cursor_position: None, + }, + ), + severity: Error, + }, + ] + "###); + } + + #[test] + fn test_check_unnecessary_braces_in_use_statement() { + check_not_applicable( + " + use a; + use a::{c, d::e}; + ", + check_unnecessary_braces_in_use_statement, + ); + check_apply("use {b};", "use b;", check_unnecessary_braces_in_use_statement); + check_apply("use a::{c};", "use a::c;", check_unnecessary_braces_in_use_statement); + check_apply("use a::{self};", "use a;", check_unnecessary_braces_in_use_statement); + check_apply( + "use a::{c, d::{e}};", + "use a::{c, d::e};", + check_unnecessary_braces_in_use_statement, + ); + } + + #[test] + fn test_check_struct_shorthand_initialization() { + check_not_applicable( + r#" + struct A { + a: &'static str + } + + fn main() { + A { + a: "hello" + } + } + "#, + check_struct_shorthand_initialization, + ); + + check_apply( + r#" +struct A { + a: &'static str +} + +fn main() { + let a = "haha"; + A { + a: a + } +} + "#, + r#" +struct A { + a: &'static str +} + +fn main() { + let a = "haha"; + A { + a + } +} + "#, + check_struct_shorthand_initialization, + ); + + check_apply( + r#" +struct A { + a: &'static str, + b: &'static str +} + +fn main() { + let a = "haha"; + let b = "bb"; + A { + a: a, + b + } +} + "#, + r#" +struct A { + a: &'static str, + b: &'static str +} + +fn main() { + let a = "haha"; + let b = "bb"; + A { + a, + b + } +} + "#, + check_struct_shorthand_initialization, + ); + } +} diff --git a/crates/ra_ide/src/display.rs b/crates/ra_ide/src/display.rs new file mode 100644 index 000000000..30617412a --- /dev/null +++ b/crates/ra_ide/src/display.rs @@ -0,0 +1,84 @@ +//! This module contains utilities for turning SyntaxNodes and HIR types +//! into types that may be used to render in a UI. 
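+ // For instance (illustrative input), `rust_code_markup_with_doc("fn foo() -> u32", Some("Returns foo."))`,
+ // defined at the bottom of this module, yields the hover markup:
+ //
+ //     ```rust
+ //     fn foo() -> u32
+ //     ```
+ //
+ //     Returns foo.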
+ +mod function_signature; +mod navigation_target; +mod structure; +mod short_label; + +use ra_syntax::{ + ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner}, + SyntaxKind::{ATTR, COMMENT}, +}; + +pub use function_signature::FunctionSignature; +pub use navigation_target::NavigationTarget; +pub use structure::{file_structure, StructureNode}; + +pub(crate) use navigation_target::{description_from_symbol, docs_from_symbol, ToNav}; +pub(crate) use short_label::ShortLabel; + +pub(crate) fn function_label(node: &ast::FnDef) -> String { + FunctionSignature::from(node).to_string() +} + +pub(crate) fn const_label(node: &ast::ConstDef) -> String { + let label: String = node + .syntax() + .children_with_tokens() + .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) + .map(|node| node.to_string()) + .collect(); + + label.trim().to_owned() +} + +pub(crate) fn type_label(node: &ast::TypeAliasDef) -> String { + let label: String = node + .syntax() + .children_with_tokens() + .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) + .map(|node| node.to_string()) + .collect(); + + label.trim().to_owned() +} + +pub(crate) fn generic_parameters(node: &N) -> Vec { + let mut res = vec![]; + if let Some(type_params) = node.type_param_list() { + res.extend(type_params.lifetime_params().map(|p| p.syntax().text().to_string())); + res.extend(type_params.type_params().map(|p| p.syntax().text().to_string())); + } + res +} + +pub(crate) fn where_predicates(node: &N) -> Vec { + let mut res = vec![]; + if let Some(clause) = node.where_clause() { + res.extend(clause.predicates().map(|p| p.syntax().text().to_string())); + } + res +} + +pub(crate) fn macro_label(node: &ast::MacroCall) -> String { + let name = node.name().map(|name| name.syntax().text().to_string()).unwrap_or_default(); + let vis = if node.has_atom_attr("macro_export") { "#[macro_export]\n" } else { "" }; + format!("{}macro_rules! {}", vis, name) +} + +pub(crate) fn rust_code_markup>(val: CODE) -> String { + rust_code_markup_with_doc::<_, &str>(val, None) +} + +pub(crate) fn rust_code_markup_with_doc(val: CODE, doc: Option) -> String +where + CODE: AsRef, + DOC: AsRef, +{ + if let Some(doc) = doc { + format!("```rust\n{}\n```\n\n{}", val.as_ref(), doc.as_ref()) + } else { + format!("```rust\n{}\n```", val.as_ref()) + } +} diff --git a/crates/ra_ide/src/display/function_signature.rs b/crates/ra_ide/src/display/function_signature.rs new file mode 100644 index 000000000..d96de4e4c --- /dev/null +++ b/crates/ra_ide/src/display/function_signature.rs @@ -0,0 +1,215 @@ +//! 
FIXME: write short doc here + +use std::fmt::{self, Display}; + +use hir::{Docs, Documentation, HasSource, HirDisplay}; +use join_to_string::join; +use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; +use std::convert::From; + +use crate::{ + db, + display::{generic_parameters, where_predicates}, +}; + +#[derive(Debug)] +pub enum CallableKind { + Function, + StructConstructor, + VariantConstructor, + Macro, +} + +/// Contains information about a function signature +#[derive(Debug)] +pub struct FunctionSignature { + pub kind: CallableKind, + /// Optional visibility + pub visibility: Option, + /// Name of the function + pub name: Option, + /// Documentation for the function + pub doc: Option, + /// Generic parameters + pub generic_parameters: Vec, + /// Parameters of the function + pub parameters: Vec, + /// Optional return type + pub ret_type: Option, + /// Where predicates + pub where_predicates: Vec, +} + +impl FunctionSignature { + pub(crate) fn with_doc_opt(mut self, doc: Option) -> Self { + self.doc = doc; + self + } + + pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { + let doc = function.docs(db); + let ast_node = function.source(db).value; + FunctionSignature::from(&ast_node).with_doc_opt(doc) + } + + pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option { + let node: ast::StructDef = st.source(db).value; + match node.kind() { + ast::StructKind::Record(_) => return None, + _ => (), + }; + + let params = st + .fields(db) + .into_iter() + .map(|field: hir::StructField| { + let ty = field.ty(db); + format!("{}", ty.display(db)) + }) + .collect(); + + Some( + FunctionSignature { + kind: CallableKind::StructConstructor, + visibility: node.visibility().map(|n| n.syntax().text().to_string()), + name: node.name().map(|n| n.text().to_string()), + ret_type: node.name().map(|n| n.text().to_string()), + parameters: params, + generic_parameters: generic_parameters(&node), + where_predicates: where_predicates(&node), + doc: None, + } + .with_doc_opt(st.docs(db)), + ) + } + + pub(crate) fn from_enum_variant( + db: &db::RootDatabase, + variant: hir::EnumVariant, + ) -> Option { + let node: ast::EnumVariant = variant.source(db).value; + match node.kind() { + ast::StructKind::Record(_) | ast::StructKind::Unit => return None, + _ => (), + }; + + let parent_name = match variant.parent_enum(db).name(db) { + Some(name) => name.to_string(), + None => "missing".into(), + }; + + let name = format!("{}::{}", parent_name, variant.name(db).unwrap()); + + let params = variant + .fields(db) + .into_iter() + .map(|field: hir::StructField| { + let name = field.name(db); + let ty = field.ty(db); + format!("{}: {}", name, ty.display(db)) + }) + .collect(); + + Some( + FunctionSignature { + kind: CallableKind::VariantConstructor, + visibility: None, + name: Some(name), + ret_type: None, + parameters: params, + generic_parameters: vec![], + where_predicates: vec![], + doc: None, + } + .with_doc_opt(variant.docs(db)), + ) + } + + pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option { + let node: ast::MacroCall = macro_def.source(db).value; + + let params = vec![]; + + Some( + FunctionSignature { + kind: CallableKind::Macro, + visibility: None, + name: node.name().map(|n| n.text().to_string()), + ret_type: None, + parameters: params, + generic_parameters: vec![], + where_predicates: vec![], + doc: None, + } + .with_doc_opt(macro_def.docs(db)), + ) + } +} + +impl From<&'_ ast::FnDef> for FunctionSignature { + fn from(node: 
&ast::FnDef) -> FunctionSignature { + fn param_list(node: &ast::FnDef) -> Vec { + let mut res = vec![]; + if let Some(param_list) = node.param_list() { + if let Some(self_param) = param_list.self_param() { + res.push(self_param.syntax().text().to_string()) + } + + res.extend(param_list.params().map(|param| param.syntax().text().to_string())); + } + res + } + + FunctionSignature { + kind: CallableKind::Function, + visibility: node.visibility().map(|n| n.syntax().text().to_string()), + name: node.name().map(|n| n.text().to_string()), + ret_type: node + .ret_type() + .and_then(|r| r.type_ref()) + .map(|n| n.syntax().text().to_string()), + parameters: param_list(node), + generic_parameters: generic_parameters(node), + where_predicates: where_predicates(node), + // docs are processed separately + doc: None, + } + } +} + +impl Display for FunctionSignature { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(t) = &self.visibility { + write!(f, "{} ", t)?; + } + + if let Some(name) = &self.name { + match self.kind { + CallableKind::Function => write!(f, "fn {}", name)?, + CallableKind::StructConstructor => write!(f, "struct {}", name)?, + CallableKind::VariantConstructor => write!(f, "{}", name)?, + CallableKind::Macro => write!(f, "{}!", name)?, + } + } + + if !self.generic_parameters.is_empty() { + join(self.generic_parameters.iter()) + .separator(", ") + .surround_with("<", ">") + .to_fmt(f)?; + } + + join(self.parameters.iter()).separator(", ").surround_with("(", ")").to_fmt(f)?; + + if let Some(t) = &self.ret_type { + write!(f, " -> {}", t)?; + } + + if !self.where_predicates.is_empty() { + write!(f, "\nwhere ")?; + join(self.where_predicates.iter()).separator(",\n ").to_fmt(f)?; + } + + Ok(()) + } +} diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs new file mode 100644 index 000000000..6ac60722b --- /dev/null +++ b/crates/ra_ide/src/display/navigation_target.rs @@ -0,0 +1,411 @@ +//! FIXME: write short doc here + +use hir::{AssocItem, Either, FieldSource, HasSource, ModuleSource, Source}; +use ra_db::{FileId, SourceDatabase}; +use ra_syntax::{ + ast::{self, DocCommentsOwner, NameOwner}, + match_ast, AstNode, SmolStr, + SyntaxKind::{self, BIND_PAT}, + TextRange, +}; + +use crate::{db::RootDatabase, expand::original_range, FileSymbol}; + +use super::short_label::ShortLabel; + +/// `NavigationTarget` represents and element in the editor's UI which you can +/// click on to navigate to a particular piece of code. +/// +/// Typically, a `NavigationTarget` corresponds to some element in the source +/// code, like a function or a struct, but this is not strictly required. +#[derive(Debug, Clone)] +pub struct NavigationTarget { + file_id: FileId, + name: SmolStr, + kind: SyntaxKind, + full_range: TextRange, + focus_range: Option, + container_name: Option, + description: Option, + docs: Option, +} + +pub(crate) trait ToNav { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; +} + +impl NavigationTarget { + /// When `focus_range` is specified, returns it. 
otherwise + /// returns `full_range` + pub fn range(&self) -> TextRange { + self.focus_range.unwrap_or(self.full_range) + } + + pub fn name(&self) -> &SmolStr { + &self.name + } + + pub fn container_name(&self) -> Option<&SmolStr> { + self.container_name.as_ref() + } + + pub fn kind(&self) -> SyntaxKind { + self.kind + } + + pub fn file_id(&self) -> FileId { + self.file_id + } + + pub fn full_range(&self) -> TextRange { + self.full_range + } + + pub fn docs(&self) -> Option<&str> { + self.docs.as_ref().map(String::as_str) + } + + pub fn description(&self) -> Option<&str> { + self.description.as_ref().map(String::as_str) + } + + /// A "most interesting" range withing the `full_range`. + /// + /// Typically, `full_range` is the whole syntax node, + /// including doc comments, and `focus_range` is the range of the identifier. + pub fn focus_range(&self) -> Option { + self.focus_range + } + + pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { + let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); + if let Some(src) = module.declaration_source(db) { + let frange = original_range(db, src.as_ref().map(|it| it.syntax())); + return NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + src.value.syntax().kind(), + src.value.doc_comment_text(), + src.value.short_label(), + ); + } + module.to_nav(db) + } + + pub(crate) fn from_def( + db: &RootDatabase, + module_def: hir::ModuleDef, + ) -> Option { + let nav = match module_def { + hir::ModuleDef::Module(module) => module.to_nav(db), + hir::ModuleDef::Function(it) => it.to_nav(db), + hir::ModuleDef::Adt(it) => it.to_nav(db), + hir::ModuleDef::Const(it) => it.to_nav(db), + hir::ModuleDef::Static(it) => it.to_nav(db), + hir::ModuleDef::EnumVariant(it) => it.to_nav(db), + hir::ModuleDef::Trait(it) => it.to_nav(db), + hir::ModuleDef::TypeAlias(it) => it.to_nav(db), + hir::ModuleDef::BuiltinType(..) 
=> { + return None; + } + }; + Some(nav) + } + + #[cfg(test)] + pub(crate) fn assert_match(&self, expected: &str) { + let actual = self.debug_render(); + test_utils::assert_eq_text!(expected.trim(), actual.trim(),); + } + + #[cfg(test)] + pub(crate) fn debug_render(&self) -> String { + let mut buf = format!( + "{} {:?} {:?} {:?}", + self.name(), + self.kind(), + self.file_id(), + self.full_range() + ); + if let Some(focus_range) = self.focus_range() { + buf.push_str(&format!(" {:?}", focus_range)) + } + if let Some(container_name) = self.container_name() { + buf.push_str(&format!(" {}", container_name)) + } + buf + } + + /// Allows `NavigationTarget` to be created from a `NameOwner` + pub(crate) fn from_named( + db: &RootDatabase, + node: Source<&dyn ast::NameOwner>, + docs: Option, + description: Option, + ) -> NavigationTarget { + //FIXME: use `_` instead of empty string + let name = node.value.name().map(|it| it.text().clone()).unwrap_or_default(); + let focus_range = + node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); + let frange = original_range(db, node.map(|it| it.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + focus_range, + frange.range, + node.value.syntax().kind(), + docs, + description, + ) + } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + docs: Option, + description: Option, + ) -> NavigationTarget { + NavigationTarget { + file_id, + name, + kind, + full_range, + focus_range, + container_name: None, + description, + docs, + } + } +} + +impl ToNav for FileSymbol { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + NavigationTarget { + file_id: self.file_id, + name: self.name.clone(), + kind: self.ptr.kind(), + full_range: self.ptr.range(), + focus_range: self.name_range, + container_name: self.container_name.clone(), + description: description_from_symbol(db, self), + docs: docs_from_symbol(db, self), + } + } +} + +pub(crate) trait ToNavFromAst {} +impl ToNavFromAst for hir::Function {} +impl ToNavFromAst for hir::Const {} +impl ToNavFromAst for hir::Static {} +impl ToNavFromAst for hir::Struct {} +impl ToNavFromAst for hir::Enum {} +impl ToNavFromAst for hir::EnumVariant {} +impl ToNavFromAst for hir::Union {} +impl ToNavFromAst for hir::TypeAlias {} +impl ToNavFromAst for hir::Trait {} + +impl ToNav for D +where + D: HasSource + ToNavFromAst + Copy, + D::Ast: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, +{ + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + NavigationTarget::from_named( + db, + src.as_ref().map(|it| it as &dyn ast::NameOwner), + src.value.doc_comment_text(), + src.value.short_label(), + ) + } +} + +impl ToNav for hir::Module { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.definition_source(db); + let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default(); + match &src.value { + ModuleSource::SourceFile(node) => { + let frange = original_range(db, src.with_value(node.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + node.syntax().kind(), + None, + None, + ) + } + ModuleSource::Module(node) => { + let frange = original_range(db, src.with_value(node.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + name, + None, + frange.range, + node.syntax().kind(), + node.doc_comment_text(), + node.short_label(), + ) + } + } + } +} + +impl ToNav for hir::ImplBlock { 
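+ // `impl` blocks have no name of their own, so the target is labelled
+ // simply "impl" and its range covers the whole block.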
+ fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let frange = original_range(db, src.as_ref().map(|it| it.syntax())); + + NavigationTarget::from_syntax( + frange.file_id, + "impl".into(), + None, + frange.range, + src.value.syntax().kind(), + None, + None, + ) + } +} + +impl ToNav for hir::StructField { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + + match &src.value { + FieldSource::Named(it) => NavigationTarget::from_named( + db, + src.with_value(it), + it.doc_comment_text(), + it.short_label(), + ), + FieldSource::Pos(it) => { + let frange = original_range(db, src.with_value(it.syntax())); + NavigationTarget::from_syntax( + frange.file_id, + "".into(), + None, + frange.range, + it.syntax().kind(), + None, + None, + ) + } + } + } +} + +impl ToNav for hir::MacroDef { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + log::debug!("nav target {:#?}", src.value.syntax()); + NavigationTarget::from_named( + db, + src.as_ref().map(|it| it as &dyn ast::NameOwner), + src.value.doc_comment_text(), + None, + ) + } +} + +impl ToNav for hir::Adt { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + match self { + hir::Adt::Struct(it) => it.to_nav(db), + hir::Adt::Union(it) => it.to_nav(db), + hir::Adt::Enum(it) => it.to_nav(db), + } + } +} + +impl ToNav for hir::AssocItem { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + match self { + AssocItem::Function(it) => it.to_nav(db), + AssocItem::Const(it) => it.to_nav(db), + AssocItem::TypeAlias(it) => it.to_nav(db), + } + } +} + +impl ToNav for hir::Local { + fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + let src = self.source(db); + let (full_range, focus_range) = match src.value { + Either::A(it) => { + (it.syntax().text_range(), it.name().map(|it| it.syntax().text_range())) + } + Either::B(it) => (it.syntax().text_range(), Some(it.self_kw_token().text_range())), + }; + let name = match self.name(db) { + Some(it) => it.to_string().into(), + None => "".into(), + }; + NavigationTarget { + file_id: src.file_id.original_file(db), + name, + kind: BIND_PAT, + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + } +} + +pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { + let parse = db.parse(symbol.file_id); + let node = symbol.ptr.to_node(parse.tree().syntax()); + + match_ast! { + match node { + ast::FnDef(it) => { it.doc_comment_text() }, + ast::StructDef(it) => { it.doc_comment_text() }, + ast::EnumDef(it) => { it.doc_comment_text() }, + ast::TraitDef(it) => { it.doc_comment_text() }, + ast::Module(it) => { it.doc_comment_text() }, + ast::TypeAliasDef(it) => { it.doc_comment_text() }, + ast::ConstDef(it) => { it.doc_comment_text() }, + ast::StaticDef(it) => { it.doc_comment_text() }, + ast::RecordFieldDef(it) => { it.doc_comment_text() }, + ast::EnumVariant(it) => { it.doc_comment_text() }, + ast::MacroCall(it) => { it.doc_comment_text() }, + _ => None, + } + } +} + +/// Get a description of a symbol. +/// +/// e.g. `struct Name`, `enum Name`, `fn Name` +pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option { + let parse = db.parse(symbol.file_id); + let node = symbol.ptr.to_node(parse.tree().syntax()); + + match_ast! 
{ + match node { + ast::FnDef(it) => { it.short_label() }, + ast::StructDef(it) => { it.short_label() }, + ast::EnumDef(it) => { it.short_label() }, + ast::TraitDef(it) => { it.short_label() }, + ast::Module(it) => { it.short_label() }, + ast::TypeAliasDef(it) => { it.short_label() }, + ast::ConstDef(it) => { it.short_label() }, + ast::StaticDef(it) => { it.short_label() }, + ast::RecordFieldDef(it) => { it.short_label() }, + ast::EnumVariant(it) => { it.short_label() }, + _ => None, + } + } +} diff --git a/crates/ra_ide/src/display/short_label.rs b/crates/ra_ide/src/display/short_label.rs new file mode 100644 index 000000000..9ffc9b980 --- /dev/null +++ b/crates/ra_ide/src/display/short_label.rs @@ -0,0 +1,97 @@ +//! FIXME: write short doc here + +use format_buf::format; +use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner}; + +pub(crate) trait ShortLabel { + fn short_label(&self) -> Option; +} + +impl ShortLabel for ast::FnDef { + fn short_label(&self) -> Option { + Some(crate::display::function_label(self)) + } +} + +impl ShortLabel for ast::StructDef { + fn short_label(&self) -> Option { + short_label_from_node(self, "struct ") + } +} + +impl ShortLabel for ast::UnionDef { + fn short_label(&self) -> Option { + short_label_from_node(self, "union ") + } +} + +impl ShortLabel for ast::EnumDef { + fn short_label(&self) -> Option { + short_label_from_node(self, "enum ") + } +} + +impl ShortLabel for ast::TraitDef { + fn short_label(&self) -> Option { + short_label_from_node(self, "trait ") + } +} + +impl ShortLabel for ast::Module { + fn short_label(&self) -> Option { + short_label_from_node(self, "mod ") + } +} + +impl ShortLabel for ast::TypeAliasDef { + fn short_label(&self) -> Option { + short_label_from_node(self, "type ") + } +} + +impl ShortLabel for ast::ConstDef { + fn short_label(&self) -> Option { + short_label_from_ascribed_node(self, "const ") + } +} + +impl ShortLabel for ast::StaticDef { + fn short_label(&self) -> Option { + short_label_from_ascribed_node(self, "static ") + } +} + +impl ShortLabel for ast::RecordFieldDef { + fn short_label(&self) -> Option { + short_label_from_ascribed_node(self, "") + } +} + +impl ShortLabel for ast::EnumVariant { + fn short_label(&self) -> Option { + Some(self.name()?.text().to_string()) + } +} + +fn short_label_from_ascribed_node(node: &T, prefix: &str) -> Option +where + T: NameOwner + VisibilityOwner + TypeAscriptionOwner, +{ + let mut buf = short_label_from_node(node, prefix)?; + + if let Some(type_ref) = node.ascribed_type() { + format!(buf, ": {}", type_ref.syntax()); + } + + Some(buf) +} + +fn short_label_from_node(node: &T, label: &str) -> Option +where + T: NameOwner + VisibilityOwner, +{ + let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default(); + buf.push_str(label); + buf.push_str(node.name()?.text().as_str()); + Some(buf) +} diff --git a/crates/ra_ide/src/display/structure.rs b/crates/ra_ide/src/display/structure.rs new file mode 100644 index 000000000..a80d65ac7 --- /dev/null +++ b/crates/ra_ide/src/display/structure.rs @@ -0,0 +1,401 @@ +//! 
FIXME: write short doc here + +use crate::TextRange; + +use ra_syntax::{ + ast::{self, AttrsOwner, NameOwner, TypeAscriptionOwner, TypeParamsOwner}, + match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, WalkEvent, +}; + +#[derive(Debug, Clone)] +pub struct StructureNode { + pub parent: Option, + pub label: String, + pub navigation_range: TextRange, + pub node_range: TextRange, + pub kind: SyntaxKind, + pub detail: Option, + pub deprecated: bool, +} + +pub fn file_structure(file: &SourceFile) -> Vec { + let mut res = Vec::new(); + let mut stack = Vec::new(); + + for event in file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = structure_node(&node) { + symbol.parent = stack.last().copied(); + stack.push(res.len()); + res.push(symbol); + } + } + WalkEvent::Leave(node) => { + if structure_node(&node).is_some() { + stack.pop().unwrap(); + } + } + } + } + res +} + +fn structure_node(node: &SyntaxNode) -> Option { + fn decl(node: N) -> Option { + decl_with_detail(node, None) + } + + fn decl_with_ascription( + node: N, + ) -> Option { + let ty = node.ascribed_type(); + decl_with_type_ref(node, ty) + } + + fn decl_with_type_ref( + node: N, + type_ref: Option, + ) -> Option { + let detail = type_ref.map(|type_ref| { + let mut detail = String::new(); + collapse_ws(type_ref.syntax(), &mut detail); + detail + }); + decl_with_detail(node, detail) + } + + fn decl_with_detail( + node: N, + detail: Option, + ) -> Option { + let name = node.name()?; + + Some(StructureNode { + parent: None, + label: name.text().to_string(), + navigation_range: name.syntax().text_range(), + node_range: node.syntax().text_range(), + kind: node.syntax().kind(), + detail, + deprecated: node.attrs().filter_map(|x| x.simple_name()).any(|x| x == "deprecated"), + }) + } + + fn collapse_ws(node: &SyntaxNode, output: &mut String) { + let mut can_insert_ws = false; + node.text().for_each_chunk(|chunk| { + for line in chunk.lines() { + let line = line.trim(); + if line.is_empty() { + if can_insert_ws { + output.push(' '); + can_insert_ws = false; + } + } else { + output.push_str(line); + can_insert_ws = true; + } + } + }) + } + + match_ast! 
{ + match node { + ast::FnDef(it) => { + let mut detail = String::from("fn"); + if let Some(type_param_list) = it.type_param_list() { + collapse_ws(type_param_list.syntax(), &mut detail); + } + if let Some(param_list) = it.param_list() { + collapse_ws(param_list.syntax(), &mut detail); + } + if let Some(ret_type) = it.ret_type() { + detail.push_str(" "); + collapse_ws(ret_type.syntax(), &mut detail); + } + + decl_with_detail(it, Some(detail)) + }, + ast::StructDef(it) => { decl(it) }, + ast::EnumDef(it) => { decl(it) }, + ast::EnumVariant(it) => { decl(it) }, + ast::TraitDef(it) => { decl(it) }, + ast::Module(it) => { decl(it) }, + ast::TypeAliasDef(it) => { + let ty = it.type_ref(); + decl_with_type_ref(it, ty) + }, + ast::RecordFieldDef(it) => { decl_with_ascription(it) }, + ast::ConstDef(it) => { decl_with_ascription(it) }, + ast::StaticDef(it) => { decl_with_ascription(it) }, + ast::ImplBlock(it) => { + let target_type = it.target_type()?; + let target_trait = it.target_trait(); + let label = match target_trait { + None => format!("impl {}", target_type.syntax().text()), + Some(t) => { + format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),) + } + }; + + let node = StructureNode { + parent: None, + label, + navigation_range: target_type.syntax().text_range(), + node_range: it.syntax().text_range(), + kind: it.syntax().kind(), + detail: None, + deprecated: false, + }; + Some(node) + }, + ast::MacroCall(it) => { + let first_token = it.syntax().first_token().unwrap(); + if first_token.text().as_str() != "macro_rules" { + return None; + } + decl(it) + }, + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use insta::assert_debug_snapshot; + + #[test] + fn test_file_structure() { + let file = SourceFile::parse( + r#" +struct Foo { + x: i32 +} + +mod m { + fn bar1() {} + fn bar2(t: T) -> T {} + fn bar3(a: A, + b: B) -> Vec< + u32 + > {} +} + +enum E { X, Y(i32) } +type T = (); +static S: i32 = 92; +const C: i32 = 92; + +impl E {} + +impl fmt::Debug for E {} + +macro_rules! 
mc { + () => {} +} + +#[deprecated] +fn obsolete() {} + +#[deprecated(note = "for awhile")] +fn very_obsolete() {} +"#, + ) + .ok() + .unwrap(); + let structure = file_structure(&file); + assert_debug_snapshot!(structure, + @r###" + [ + StructureNode { + parent: None, + label: "Foo", + navigation_range: [8; 11), + node_range: [1; 26), + kind: STRUCT_DEF, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 0, + ), + label: "x", + navigation_range: [18; 19), + node_range: [18; 24), + kind: RECORD_FIELD_DEF, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "m", + navigation_range: [32; 33), + node_range: [28; 158), + kind: MODULE, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar1", + navigation_range: [43; 47), + node_range: [40; 52), + kind: FN_DEF, + detail: Some( + "fn()", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar2", + navigation_range: [60; 64), + node_range: [57; 81), + kind: FN_DEF, + detail: Some( + "fn(t: T) -> T", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 2, + ), + label: "bar3", + navigation_range: [89; 93), + node_range: [86; 156), + kind: FN_DEF, + detail: Some( + "fn(a: A, b: B) -> Vec< u32 >", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "E", + navigation_range: [165; 166), + node_range: [160; 180), + kind: ENUM_DEF, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "X", + navigation_range: [169; 170), + node_range: [169; 170), + kind: ENUM_VARIANT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 6, + ), + label: "Y", + navigation_range: [172; 173), + node_range: [172; 178), + kind: ENUM_VARIANT, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "T", + navigation_range: [186; 187), + node_range: [181; 193), + kind: TYPE_ALIAS_DEF, + detail: Some( + "()", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "S", + navigation_range: [201; 202), + node_range: [194; 213), + kind: STATIC_DEF, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "C", + navigation_range: [220; 221), + node_range: [214; 232), + kind: CONST_DEF, + detail: Some( + "i32", + ), + deprecated: false, + }, + StructureNode { + parent: None, + label: "impl E", + navigation_range: [239; 240), + node_range: [234; 243), + kind: IMPL_BLOCK, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "impl fmt::Debug for E", + navigation_range: [265; 266), + node_range: [245; 269), + kind: IMPL_BLOCK, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "mc", + navigation_range: [284; 286), + node_range: [271; 303), + kind: MACRO_CALL, + detail: None, + deprecated: false, + }, + StructureNode { + parent: None, + label: "obsolete", + navigation_range: [322; 330), + node_range: [305; 335), + kind: FN_DEF, + detail: Some( + "fn()", + ), + deprecated: true, + }, + StructureNode { + parent: None, + label: "very_obsolete", + navigation_range: [375; 388), + node_range: [337; 393), + kind: FN_DEF, + detail: Some( + "fn()", + ), + deprecated: true, + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs new file mode 100644 index 000000000..2f1abf509 --- /dev/null +++ b/crates/ra_ide/src/expand.rs @@ -0,0 +1,63 @@ +//! 
Utilities to work with files, produced by macros. +use std::iter::successors; + +use hir::Source; +use ra_db::FileId; +use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken}; + +use crate::{db::RootDatabase, FileRange}; + +pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange { + let expansion = match node.file_id.expansion_info(db) { + None => { + return FileRange { + file_id: node.file_id.original_file(db), + range: node.value.text_range(), + } + } + Some(it) => it, + }; + // FIXME: the following completely wrong. + // + // *First*, we should try to map first and last tokens of node, and, if that + // fails, return the range of the overall macro expansions. + // + // *Second*, we should handle recurside macro expansions + + let token = node + .value + .descendants_with_tokens() + .filter_map(|it| it.into_token()) + .find_map(|it| expansion.map_token_up(node.with_value(&it))); + + match token { + Some(it) => { + FileRange { file_id: it.file_id.original_file(db), range: it.value.text_range() } + } + None => { + FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() } + } + } +} + +pub(crate) fn descend_into_macros( + db: &RootDatabase, + file_id: FileId, + token: SyntaxToken, +) -> Source { + let src = Source::new(file_id.into(), token); + + successors(Some(src), |token| { + let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; + let tt = macro_call.token_tree()?; + if !token.value.text_range().is_subrange(&tt.syntax().text_range()) { + return None; + } + let source_analyzer = + hir::SourceAnalyzer::new(db, token.with_value(token.value.parent()).as_ref(), None); + let exp = source_analyzer.expand(db, token.with_value(¯o_call))?; + exp.map_token_down(db, token.as_ref()) + }) + .last() + .unwrap() +} diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs new file mode 100644 index 000000000..abc602244 --- /dev/null +++ b/crates/ra_ide/src/expand_macro.rs @@ -0,0 +1,295 @@ +//! 
This modules implements "expand macro" functionality in the IDE + +use crate::{db::RootDatabase, FilePosition}; +use hir::db::AstDatabase; +use ra_db::SourceDatabase; +use rustc_hash::FxHashMap; + +use ra_syntax::{ + algo::{find_node_at_offset, replace_descendants}, + ast::{self}, + AstNode, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, T, +}; + +pub struct ExpandedMacro { + pub name: String, + pub expansion: String, +} + +pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option { + let parse = db.parse(position.file_id); + let file = parse.tree(); + let name_ref = find_node_at_offset::(file.syntax(), position.offset)?; + let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; + + let source = hir::Source::new(position.file_id.into(), mac.syntax()); + let expanded = expand_macro_recur(db, source, source.with_value(&mac))?; + + // FIXME: + // macro expansion may lose all white space information + // But we hope someday we can use ra_fmt for that + let expansion = insert_whitespaces(expanded); + Some(ExpandedMacro { name: name_ref.text().to_string(), expansion }) +} + +fn expand_macro_recur( + db: &RootDatabase, + source: hir::Source<&SyntaxNode>, + macro_call: hir::Source<&ast::MacroCall>, +) -> Option { + let analyzer = hir::SourceAnalyzer::new(db, source, None); + let expansion = analyzer.expand(db, macro_call)?; + let macro_file_id = expansion.file_id(); + let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?; + + let children = expanded.descendants().filter_map(ast::MacroCall::cast); + let mut replaces = FxHashMap::default(); + + for child in children.into_iter() { + let node = hir::Source::new(macro_file_id, &child); + if let Some(new_node) = expand_macro_recur(db, source, node) { + // Replace the whole node if it is root + // `replace_descendants` will not replace the parent node + // but `SyntaxNode::descendants include itself + if expanded == *child.syntax() { + expanded = new_node; + } else { + replaces.insert(child.syntax().clone().into(), new_node.into()); + } + } + } + + Some(replace_descendants(&expanded, &replaces)) +} + +// FIXME: It would also be cool to share logic here and in the mbe tests, +// which are pretty unreadable at the moment. 
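+//
+// What follows is a best-effort pretty-printer for the expanded token stream:
+// it tracks `{`/`}` nesting to compute indentation, puts `;`, `{` and `}` on
+// their own lines, surrounds `->`, `=` and `=>` with spaces, and appends a
+// space after identifier/keyword/literal tokens when the next token is not
+// punctuation. For example (cf. the `macro_expand_multiple_lines` snapshot
+// test below), an expansion equivalent to `fn some_thing()->u32{let a=0;a+10}`
+// is rendered as:
+//
+//     fn some_thing() -> u32 {
+//         let a = 0;
+//         a+10
+//     }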
+fn insert_whitespaces(syn: SyntaxNode) -> String { + use SyntaxKind::*; + + let mut res = String::new(); + let mut token_iter = syn + .preorder_with_tokens() + .filter_map(|event| { + if let WalkEvent::Enter(NodeOrToken::Token(token)) = event { + Some(token) + } else { + None + } + }) + .peekable(); + + let mut indent = 0; + let mut last: Option = None; + + while let Some(token) = token_iter.next() { + let mut is_next = |f: fn(SyntaxKind) -> bool, default| -> bool { + token_iter.peek().map(|it| f(it.kind())).unwrap_or(default) + }; + let is_last = |f: fn(SyntaxKind) -> bool, default| -> bool { + last.map(|it| f(it)).unwrap_or(default) + }; + + res += &match token.kind() { + k @ _ if is_text(k) && is_next(|it| !it.is_punct(), true) => { + token.text().to_string() + " " + } + L_CURLY if is_next(|it| it != R_CURLY, true) => { + indent += 1; + let leading_space = if is_last(|it| is_text(it), false) { " " } else { "" }; + format!("{}{{\n{}", leading_space, " ".repeat(indent)) + } + R_CURLY if is_last(|it| it != L_CURLY, true) => { + indent = indent.checked_sub(1).unwrap_or(0); + format!("\n{}}}", " ".repeat(indent)) + } + R_CURLY => format!("}}\n{}", " ".repeat(indent)), + T![;] => format!(";\n{}", " ".repeat(indent)), + T![->] => " -> ".to_string(), + T![=] => " = ".to_string(), + T![=>] => " => ".to_string(), + _ => token.text().to_string(), + }; + + last = Some(token.kind()); + } + + return res; + + fn is_text(k: SyntaxKind) -> bool { + k.is_keyword() || k.is_literal() || k == IDENT + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::mock_analysis::analysis_and_position; + use insta::assert_snapshot; + + fn check_expand_macro(fixture: &str) -> ExpandedMacro { + let (analysis, pos) = analysis_and_position(fixture); + analysis.expand_macro(pos).unwrap().unwrap() + } + + #[test] + fn macro_expand_recursive_expansion() { + let res = check_expand_macro( + r#" + //- /lib.rs + macro_rules! bar { + () => { fn b() {} } + } + macro_rules! foo { + () => { bar!(); } + } + macro_rules! baz { + () => { foo!(); } + } + f<|>oo!(); + "#, + ); + + assert_eq!(res.name, "foo"); + assert_snapshot!(res.expansion, @r###" +fn b(){} +"###); + } + + #[test] + fn macro_expand_multiple_lines() { + let res = check_expand_macro( + r#" + //- /lib.rs + macro_rules! foo { + () => { + fn some_thing() -> u32 { + let a = 0; + a + 10 + } + } + } + f<|>oo!(); + "#, + ); + + assert_eq!(res.name, "foo"); + assert_snapshot!(res.expansion, @r###" +fn some_thing() -> u32 { + let a = 0; + a+10 +} +"###); + } + + #[test] + fn macro_expand_match_ast() { + let res = check_expand_macro( + r#" + //- /lib.rs + macro_rules! match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + + (match ($node:expr) { + $( ast::$ast:ident($it:ident) => $res:block, )* + _ => $catch_all:expr $(,)? + }) => {{ + $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )* + { $catch_all } + }}; + } + + fn main() { + mat<|>ch_ast! { + match container { + ast::TraitDef(it) => {}, + ast::ImplBlock(it) => {}, + _ => { continue }, + } + } + } + "#, + ); + + assert_eq!(res.name, "match_ast"); + assert_snapshot!(res.expansion, @r###" +{ + if let Some(it) = ast::TraitDef::cast(container.clone()){} + else if let Some(it) = ast::ImplBlock::cast(container.clone()){} + else { + { + continue + } + } +} +"###); + } + + #[test] + fn macro_expand_match_ast_inside_let_statement() { + let res = check_expand_macro( + r#" + //- /lib.rs + macro_rules! 
match_ast { + (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) }; + (match ($node:expr) {}) => {{}}; + } + + fn main() { + let p = f(|it| { + let res = mat<|>ch_ast! { match c {}}; + Some(res) + })?; + } + "#, + ); + + assert_eq!(res.name, "match_ast"); + assert_snapshot!(res.expansion, @r###"{}"###); + } + + #[test] + fn macro_expand_inner_macro_fail_to_expand() { + let res = check_expand_macro( + r#" + //- /lib.rs + macro_rules! bar { + (BAD) => {}; + } + macro_rules! foo { + () => {bar!()}; + } + + fn main() { + let res = fo<|>o!(); + } + "#, + ); + + assert_eq!(res.name, "foo"); + assert_snapshot!(res.expansion, @r###"bar!()"###); + } + + #[test] + fn macro_expand_with_dollar_crate() { + let res = check_expand_macro( + r#" + //- /lib.rs + #[macro_export] + macro_rules! bar { + () => {0}; + } + macro_rules! foo { + () => {$crate::bar!()}; + } + + fn main() { + let res = fo<|>o!(); + } + "#, + ); + + assert_eq!(res.name, "foo"); + assert_snapshot!(res.expansion, @r###"0"###); + } +} diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs new file mode 100644 index 000000000..4b7bfc0b1 --- /dev/null +++ b/crates/ra_ide/src/extend_selection.rs @@ -0,0 +1,452 @@ +//! FIXME: write short doc here + +use ra_db::SourceDatabase; +use ra_syntax::{ + algo::find_covering_element, + ast::{self, AstNode, AstToken}, + Direction, NodeOrToken, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, +}; + +use crate::{db::RootDatabase, FileRange}; + +// FIXME: restore macro support +pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { + let parse = db.parse(frange.file_id); + try_extend_selection(parse.tree().syntax(), frange.range).unwrap_or(frange.range) +} + +fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option { + let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; + let list_kinds = [ + RECORD_FIELD_PAT_LIST, + MATCH_ARM_LIST, + RECORD_FIELD_DEF_LIST, + TUPLE_FIELD_DEF_LIST, + RECORD_FIELD_LIST, + ENUM_VARIANT_LIST, + USE_TREE_LIST, + TYPE_PARAM_LIST, + TYPE_ARG_LIST, + TYPE_BOUND_LIST, + PARAM_LIST, + ARG_LIST, + ARRAY_EXPR, + TUPLE_EXPR, + WHERE_CLAUSE, + ]; + + if range.is_empty() { + let offset = range.start(); + let mut leaves = root.token_at_offset(offset); + if leaves.clone().all(|it| it.kind() == WHITESPACE) { + return Some(extend_ws(root, leaves.next()?, offset)); + } + let leaf_range = match leaves { + TokenAtOffset::None => return None, + TokenAtOffset::Single(l) => { + if string_kinds.contains(&l.kind()) { + extend_single_word_in_comment_or_string(&l, offset) + .unwrap_or_else(|| l.text_range()) + } else { + l.text_range() + } + } + TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(), + }; + return Some(leaf_range); + }; + let node = match find_covering_element(root, range) { + NodeOrToken::Token(token) => { + if token.text_range() != range { + return Some(token.text_range()); + } + if let Some(comment) = ast::Comment::cast(token.clone()) { + if let Some(range) = extend_comments(comment) { + return Some(range); + } + } + token.parent() + } + NodeOrToken::Node(node) => node, + }; + if node.text_range() != range { + return Some(node.text_range()); + } + + // Using shallowest node with same range allows us to traverse siblings. 
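+    // (e.g. for the argument `x` in `f(x, y)`, the NAME_REF, PATH_SEGMENT,
+    // PATH and PATH_EXPR nodes all cover the same text range; walking up to
+    // the outermost of them makes `node.parent()` the ARG_LIST, so the
+    // list-item handling below applies)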
+ let node = node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap(); + + if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { + if let Some(range) = extend_list_item(&node) { + return Some(range); + } + } + + node.parent().map(|it| it.text_range()) +} + +fn extend_single_word_in_comment_or_string( + leaf: &SyntaxToken, + offset: TextUnit, +) -> Option { + let text: &str = leaf.text(); + let cursor_position: u32 = (offset - leaf.text_range().start()).into(); + + let (before, after) = text.split_at(cursor_position as usize); + + fn non_word_char(c: char) -> bool { + !(c.is_alphanumeric() || c == '_') + } + + let start_idx = before.rfind(non_word_char)? as u32; + let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; + + let from: TextUnit = (start_idx + 1).into(); + let to: TextUnit = (cursor_position + end_idx).into(); + + let range = TextRange::from_to(from, to); + if range.is_empty() { + None + } else { + Some(range + leaf.text_range().start()) + } +} + +fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange { + let ws_text = ws.text(); + let suffix = TextRange::from_to(offset, ws.text_range().end()) - ws.text_range().start(); + let prefix = TextRange::from_to(ws.text_range().start(), offset) - ws.text_range().start(); + let ws_suffix = &ws_text.as_str()[suffix]; + let ws_prefix = &ws_text.as_str()[prefix]; + if ws_text.contains('\n') && !ws_suffix.contains('\n') { + if let Some(node) = ws.next_sibling_or_token() { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.text_range().start() + TextUnit::from((idx + 1) as u32), + None => node.text_range().start(), + }; + let end = if root.text().char_at(node.text_range().end()) == Some('\n') { + node.text_range().end() + TextUnit::of_char('\n') + } else { + node.text_range().end() + }; + return TextRange::from_to(start, end); + } + } + ws.text_range() +} + +fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken { + return if priority(&r) > priority(&l) { r } else { l }; + fn priority(n: &SyntaxToken) -> usize { + match n.kind() { + WHITESPACE => 0, + IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2, + _ => 1, + } + } +} + +/// Extend list item selection to include nearby delimiter and whitespace. +fn extend_list_item(node: &SyntaxNode) -> Option { + fn is_single_line_ws(node: &SyntaxToken) -> bool { + node.kind() == WHITESPACE && !node.text().contains('\n') + } + + fn nearby_delimiter( + delimiter_kind: SyntaxKind, + node: &SyntaxNode, + dir: Direction, + ) -> Option { + node.siblings_with_tokens(dir) + .skip(1) + .skip_while(|node| match node { + NodeOrToken::Node(_) => false, + NodeOrToken::Token(it) => is_single_line_ws(it), + }) + .next() + .and_then(|it| it.into_token()) + .filter(|node| node.kind() == delimiter_kind) + } + + let delimiter = match node.kind() { + TYPE_BOUND => T![+], + _ => T![,], + }; + if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) { + return Some(TextRange::from_to( + delimiter_node.text_range().start(), + node.text_range().end(), + )); + } + if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) { + // Include any following whitespace when delimiter is after list item. 
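+        // (e.g. extending `x: i32` in `fn foo(x: i32, y: i32)` yields
+        // "x: i32, ", trailing comma and space included; see
+        // `test_extend_selection_list` below)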
+ let final_node = delimiter_node + .next_sibling_or_token() + .and_then(|it| it.into_token()) + .filter(|node| is_single_line_ws(node)) + .unwrap_or(delimiter_node); + + return Some(TextRange::from_to(node.text_range().start(), final_node.text_range().end())); + } + + None +} + +fn extend_comments(comment: ast::Comment) -> Option { + let prev = adj_comments(&comment, Direction::Prev); + let next = adj_comments(&comment, Direction::Next); + if prev != next { + Some(TextRange::from_to( + prev.syntax().text_range().start(), + next.syntax().text_range().end(), + )) + } else { + None + } +} + +fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { + let mut res = comment.clone(); + for element in comment.syntax().siblings_with_tokens(dir) { + let token = match element.as_token() { + None => break, + Some(token) => token, + }; + if let Some(c) = ast::Comment::cast(token.clone()) { + res = c + } else if token.kind() != WHITESPACE || token.text().contains("\n\n") { + break; + } + } + res +} + +#[cfg(test)] +mod tests { + use ra_syntax::{AstNode, SourceFile}; + use test_utils::extract_offset; + + use super::*; + + fn do_check(before: &str, afters: &[&str]) { + let (cursor, before) = extract_offset(before); + let parse = SourceFile::parse(&before); + let mut range = TextRange::offset_len(cursor, 0.into()); + for &after in afters { + range = try_extend_selection(parse.tree().syntax(), range).unwrap(); + let actual = &before[range]; + assert_eq!(after, actual); + } + } + + #[test] + fn test_extend_selection_arith() { + do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); + } + + #[test] + fn test_extend_selection_list() { + do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]); + do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]); + do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,"]); + do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]); + do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", ", y: i32"]); + do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]); + + do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]); + do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", ", 33"]); + + do_check(r#"fn main() { (1, 2<|>) }"#, &["2", ", 2", "(1, 2)"]); + + do_check( + r#" +const FOO: [usize; 2] = [ + 22, + <|>33, +]"#, + &["33", "33,"], + ); + + do_check( + r#" +const FOO: [usize; 2] = [ + 22 + , 33<|>, +]"#, + &["33", ", 33"], + ); + } + + #[test] + fn test_extend_selection_start_of_the_line() { + do_check( + r#" +impl S { +<|> fn foo() { + + } +}"#, + &[" fn foo() {\n\n }\n"], + ); + } + + #[test] + fn test_extend_selection_doc_comments() { + do_check( + r#" +struct A; + +/// bla +/// bla +struct B { + <|> +} + "#, + &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"], + ) + } + + #[test] + fn test_extend_selection_comments() { + do_check( + r#" +fn bar(){} + +// fn foo() { +// 1 + <|>1 +// } + +// fn foo(){} + "#, + &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], + ); + + do_check( + r#" +// #[derive(Debug, Clone, Copy, PartialEq, Eq)] +// pub enum Direction { +// <|> Next, +// Prev +// } +"#, + &[ + "// Next,", + "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }", + ], + ); + + do_check( + r#" +/* +foo +_bar1<|>*/ +"#, + &["_bar1", "/*\nfoo\n_bar1*/"], + ); + + do_check(r#"//!<|>foo_2 bar"#, &["foo_2", 
"//!foo_2 bar"]); + + do_check(r#"/<|>/foo bar"#, &["//foo bar"]); + } + + #[test] + fn test_extend_selection_prefer_idents() { + do_check( + r#" +fn main() { foo<|>+bar;} +"#, + &["foo", "foo+bar"], + ); + do_check( + r#" +fn main() { foo+<|>bar;} +"#, + &["bar", "foo+bar"], + ); + } + + #[test] + fn test_extend_selection_prefer_lifetimes() { + do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); + do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); + } + + #[test] + fn test_extend_selection_select_first_word() { + do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); + do_check( + r#" +impl S { +fn foo() { +// hel<|>lo world +} +} +"#, + &["hello", "// hello world"], + ); + } + + #[test] + fn test_extend_selection_string() { + do_check( + r#" +fn bar(){} + +" fn f<|>oo() {" +"#, + &["foo", "\" fn foo() {\""], + ); + } + + #[test] + fn test_extend_trait_bounds_list_in_where_clause() { + do_check( + r#" +fn foo() + where + R: req::Request + 'static, + R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static, + R::Result: Serialize + 'static, +"#, + &[ + "DeserializeOwned", + "DeserializeOwned + ", + "DeserializeOwned + panic::UnwindSafe + 'static", + "R::Params: DeserializeOwned + panic::UnwindSafe + 'static", + "R::Params: DeserializeOwned + panic::UnwindSafe + 'static,", + ], + ); + do_check(r#"fn foo() where T: <|>Copy"#, &["Copy"]); + do_check(r#"fn foo() where T: <|>Copy + Display"#, &["Copy", "Copy + "]); + do_check(r#"fn foo() where T: <|>Copy +Display"#, &["Copy", "Copy +"]); + do_check(r#"fn foo() where T: <|>Copy+Display"#, &["Copy", "Copy+"]); + do_check(r#"fn foo() where T: Copy + <|>Display"#, &["Display", "+ Display"]); + do_check(r#"fn foo() where T: Copy + <|>Display + Sync"#, &["Display", "+ Display"]); + do_check(r#"fn foo() where T: Copy +<|>Display"#, &["Display", "+Display"]); + } + + #[test] + fn test_extend_trait_bounds_list_inline() { + do_check(r#"fn fooCopy>() {}"#, &["Copy"]); + do_check(r#"fn fooCopy + Display>() {}"#, &["Copy", "Copy + "]); + do_check(r#"fn fooCopy +Display>() {}"#, &["Copy", "Copy +"]); + do_check(r#"fn fooCopy+Display>() {}"#, &["Copy", "Copy+"]); + do_check(r#"fn fooDisplay>() {}"#, &["Display", "+ Display"]); + do_check(r#"fn fooDisplay + Sync>() {}"#, &["Display", "+ Display"]); + do_check(r#"fn fooDisplay>() {}"#, &["Display", "+Display"]); + do_check( + r#"fn foo + Display, U: Copy>() {}"#, + &[ + "Copy", + "Copy + ", + "Copy + Display", + "T: Copy + Display", + "T: Copy + Display, ", + "", + ], + ); + } +} diff --git a/crates/ra_ide/src/feature_flags.rs b/crates/ra_ide/src/feature_flags.rs new file mode 100644 index 000000000..de4ae513d --- /dev/null +++ b/crates/ra_ide/src/feature_flags.rs @@ -0,0 +1,70 @@ +//! FIXME: write short doc here + +use rustc_hash::FxHashMap; + +/// Feature flags hold fine-grained toggles for all *user-visible* features of +/// rust-analyzer. +/// +/// The exists such that users are able to disable any annoying feature (and, +/// with many users and many features, some features are bound to be annoying +/// for some users) +/// +/// Note that we purposefully use run-time checked strings, and not something +/// checked at compile time, to keep things simple and flexible. +/// +/// Also note that, at the moment, `FeatureFlags` also store features for +/// `ra_lsp_server`. This should be benign layering violation. 
+#[derive(Debug)] +pub struct FeatureFlags { + flags: FxHashMap, +} + +impl FeatureFlags { + fn new(flags: &[(&str, bool)]) -> FeatureFlags { + let flags = flags + .iter() + .map(|&(name, value)| { + check_flag_name(name); + (name.to_string(), value) + }) + .collect(); + FeatureFlags { flags } + } + + pub fn set(&mut self, flag: &str, value: bool) -> Result<(), ()> { + match self.flags.get_mut(flag) { + None => Err(()), + Some(slot) => { + *slot = value; + Ok(()) + } + } + } + + pub fn get(&self, flag: &str) -> bool { + match self.flags.get(flag) { + None => panic!("unknown flag: {:?}", flag), + Some(value) => *value, + } + } +} + +impl Default for FeatureFlags { + fn default() -> FeatureFlags { + FeatureFlags::new(&[ + ("lsp.diagnostics", true), + ("completion.insertion.add-call-parenthesis", true), + ("completion.enable-postfix", true), + ("notifications.workspace-loaded", true), + ]) + } +} + +fn check_flag_name(flag: &str) { + for c in flag.bytes() { + match c { + b'a'..=b'z' | b'-' | b'.' => (), + _ => panic!("flag name does not match conventions: {:?}", flag), + } + } +} diff --git a/crates/ra_ide/src/folding_ranges.rs b/crates/ra_ide/src/folding_ranges.rs new file mode 100644 index 000000000..4eeb76d14 --- /dev/null +++ b/crates/ra_ide/src/folding_ranges.rs @@ -0,0 +1,378 @@ +//! FIXME: write short doc here + +use rustc_hash::FxHashSet; + +use ra_syntax::{ + ast::{self, AstNode, AstToken, VisibilityOwner}, + Direction, NodeOrToken, SourceFile, + SyntaxKind::{self, *}, + SyntaxNode, TextRange, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum FoldKind { + Comment, + Imports, + Mods, + Block, +} + +#[derive(Debug)] +pub struct Fold { + pub range: TextRange, + pub kind: FoldKind, +} + +pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { + let mut res = vec![]; + let mut visited_comments = FxHashSet::default(); + let mut visited_imports = FxHashSet::default(); + let mut visited_mods = FxHashSet::default(); + + for element in file.syntax().descendants_with_tokens() { + // Fold items that span multiple lines + if let Some(kind) = fold_kind(element.kind()) { + let is_multiline = match &element { + NodeOrToken::Node(node) => node.text().contains_char('\n'), + NodeOrToken::Token(token) => token.text().contains('\n'), + }; + if is_multiline { + res.push(Fold { range: element.text_range(), kind }); + continue; + } + } + + match element { + NodeOrToken::Token(token) => { + // Fold groups of comments + if let Some(comment) = ast::Comment::cast(token) { + if !visited_comments.contains(&comment) { + if let Some(range) = + contiguous_range_for_comment(comment, &mut visited_comments) + { + res.push(Fold { range, kind: FoldKind::Comment }) + } + } + } + } + NodeOrToken::Node(node) => { + // Fold groups of imports + if node.kind() == USE_ITEM && !visited_imports.contains(&node) { + if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { + res.push(Fold { range, kind: FoldKind::Imports }) + } + } + + // Fold groups of mods + if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) + { + if let Some(range) = + contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods) + { + res.push(Fold { range, kind: FoldKind::Mods }) + } + } + } + } + } + + res +} + +fn fold_kind(kind: SyntaxKind) -> Option { + match kind { + COMMENT => Some(FoldKind::Comment), + USE_ITEM => Some(FoldKind::Imports), + RECORD_FIELD_DEF_LIST + | RECORD_FIELD_PAT_LIST + | ITEM_LIST + | EXTERN_ITEM_LIST + | USE_TREE_LIST + | BLOCK + | MATCH_ARM_LIST + | 
ENUM_VARIANT_LIST + | TOKEN_TREE => Some(FoldKind::Block), + _ => None, + } +} + +fn has_visibility(node: &SyntaxNode) -> bool { + ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some() +} + +fn contiguous_range_for_group( + first: &SyntaxNode, + visited: &mut FxHashSet, +) -> Option { + contiguous_range_for_group_unless(first, |_| false, visited) +} + +fn contiguous_range_for_group_unless( + first: &SyntaxNode, + unless: impl Fn(&SyntaxNode) -> bool, + visited: &mut FxHashSet, +) -> Option { + visited.insert(first.clone()); + + let mut last = first.clone(); + for element in first.siblings_with_tokens(Direction::Next) { + let node = match element { + NodeOrToken::Token(token) => { + if let Some(ws) = ast::Whitespace::cast(token) { + if !ws.spans_multiple_lines() { + // Ignore whitespace without blank lines + continue; + } + } + // There is a blank line or another token, which means that the + // group ends here + break; + } + NodeOrToken::Node(node) => node, + }; + + // Stop if we find a node that doesn't belong to the group + if node.kind() != first.kind() || unless(&node) { + break; + } + + visited.insert(node.clone()); + last = node; + } + + if first != &last { + Some(TextRange::from_to(first.text_range().start(), last.text_range().end())) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +fn contiguous_range_for_comment( + first: ast::Comment, + visited: &mut FxHashSet, +) -> Option { + visited.insert(first.clone()); + + // Only fold comments of the same flavor + let group_kind = first.kind(); + if !group_kind.shape.is_line() { + return None; + } + + let mut last = first.clone(); + for element in first.syntax().siblings_with_tokens(Direction::Next) { + match element { + NodeOrToken::Token(token) => { + if let Some(ws) = ast::Whitespace::cast(token.clone()) { + if !ws.spans_multiple_lines() { + // Ignore whitespace without blank lines + continue; + } + } + if let Some(c) = ast::Comment::cast(token) { + if c.kind() == group_kind { + visited.insert(c.clone()); + last = c; + continue; + } + } + // The comment group ends because either: + // * An element of a different kind was reached + // * A comment of a different flavor was reached + break; + } + NodeOrToken::Node(_) => break, + }; + } + + if first != last { + Some(TextRange::from_to( + first.syntax().text_range().start(), + last.syntax().text_range().end(), + )) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + use test_utils::extract_ranges; + + fn do_check(text: &str, fold_kinds: &[FoldKind]) { + let (ranges, text) = extract_ranges(text, "fold"); + let parse = SourceFile::parse(&text); + let folds = folding_ranges(&parse.tree()); + + assert_eq!( + folds.len(), + ranges.len(), + "The amount of folds is different than the expected amount" + ); + assert_eq!( + folds.len(), + fold_kinds.len(), + "The amount of fold kinds is different than the expected amount" + ); + for ((fold, range), fold_kind) in + folds.iter().zip(ranges.into_iter()).zip(fold_kinds.iter()) + { + assert_eq!(fold.range.start(), range.start()); + assert_eq!(fold.range.end(), range.end()); + assert_eq!(&fold.kind, fold_kind); + } + } + + #[test] + fn test_fold_comments() { + let text = r#" +// Hello +// this is a multiline +// comment +// + +// But this is not + +fn main() { + // We should + // also + // fold + // this one. + //! But this one is different + //! 
because it has another flavor + /* As does this + multiline comment */ +}"#; + + let fold_kinds = &[ + FoldKind::Comment, + FoldKind::Block, + FoldKind::Comment, + FoldKind::Comment, + FoldKind::Comment, + ]; + do_check(text, fold_kinds); + } + + #[test] + fn test_fold_imports() { + let text = r#" +use std::{ + str, + vec, + io as iop +}; + +fn main() { +}"#; + + let folds = &[FoldKind::Imports, FoldKind::Block, FoldKind::Block]; + do_check(text, folds); + } + + #[test] + fn test_fold_mods() { + let text = r#" + +pub mod foo; +mod after_pub; +mod after_pub_next; + +mod before_pub; +mod before_pub_next; +pub mod bar; + +mod not_folding_single; +pub mod foobar; +pub not_folding_single_next; + +#[cfg(test)] +mod with_attribute; +mod with_attribute_next; + +fn main() { +}"#; + + let folds = &[FoldKind::Mods, FoldKind::Mods, FoldKind::Mods, FoldKind::Block]; + do_check(text, folds); + } + + #[test] + fn test_fold_import_groups() { + let text = r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::HashMap; +// Some random comment +use std::collections::VecDeque; + +fn main() { +}"#; + + let folds = &[FoldKind::Imports, FoldKind::Imports, FoldKind::Block]; + do_check(text, folds); + } + + #[test] + fn test_fold_import_and_groups() { + let text = r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::{ + HashMap, + VecDeque, +}; +// Some random comment + +fn main() { +}"#; + + let folds = &[ + FoldKind::Imports, + FoldKind::Imports, + FoldKind::Imports, + FoldKind::Block, + FoldKind::Block, + ]; + do_check(text, folds); + } + + #[test] + fn test_folds_macros() { + let text = r#" +macro_rules! foo { + ($($tt:tt)*) => { $($tt)* } +} +"#; + + let folds = &[FoldKind::Block]; + do_check(text, folds); + } + + #[test] + fn test_fold_match_arms() { + let text = r#" +fn main() { + match 0 { + 0 => 0, + _ => 1, + } +}"#; + + let folds = &[FoldKind::Block, FoldKind::Block]; + do_check(text, folds); + } +} diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs new file mode 100644 index 000000000..c10a6c844 --- /dev/null +++ b/crates/ra_ide/src/goto_definition.rs @@ -0,0 +1,696 @@ +//! FIXME: write short doc here + +use hir::{db::AstDatabase, Source}; +use ra_syntax::{ + ast::{self, DocCommentsOwner}, + match_ast, AstNode, SyntaxNode, +}; + +use crate::{ + db::RootDatabase, + display::{ShortLabel, ToNav}, + expand::descend_into_macros, + references::{classify_name_ref, NameKind::*}, + FilePosition, NavigationTarget, RangeInfo, +}; + +pub(crate) fn goto_definition( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let file = db.parse_or_expand(position.file_id.into())?; + let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?; + let token = descend_into_macros(db, position.file_id, token); + + let res = match_ast! 
{ + match (token.value.parent()) { + ast::NameRef(name_ref) => { + let navs = reference_definition(db, token.with_value(&name_ref)).to_vec(); + RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()) + }, + ast::Name(name) => { + let navs = name_definition(db, token.with_value(&name))?; + RangeInfo::new(name.syntax().text_range(), navs) + + }, + _ => return None, + } + }; + + Some(res) +} + +#[derive(Debug)] +pub(crate) enum ReferenceResult { + Exact(NavigationTarget), + Approximate(Vec), +} + +impl ReferenceResult { + fn to_vec(self) -> Vec { + use self::ReferenceResult::*; + match self { + Exact(target) => vec![target], + Approximate(vec) => vec, + } + } +} + +pub(crate) fn reference_definition( + db: &RootDatabase, + name_ref: Source<&ast::NameRef>, +) -> ReferenceResult { + use self::ReferenceResult::*; + + let name_kind = classify_name_ref(db, name_ref).map(|d| d.kind); + match name_kind { + Some(Macro(mac)) => return Exact(mac.to_nav(db)), + Some(Field(field)) => return Exact(field.to_nav(db)), + Some(AssocItem(assoc)) => return Exact(assoc.to_nav(db)), + Some(Def(def)) => match NavigationTarget::from_def(db, def) { + Some(nav) => return Exact(nav), + None => return Approximate(vec![]), + }, + Some(SelfType(imp)) => { + // FIXME: ideally, this should point to the type in the impl, and + // not at the whole impl. And goto **type** definition should bring + // us to the actual type + return Exact(imp.to_nav(db)); + } + Some(Local(local)) => return Exact(local.to_nav(db)), + Some(GenericParam(_)) => { + // FIXME: go to the generic param def + } + None => {} + }; + + // Fallback index based approach: + let navs = crate::symbol_index::index_resolve(db, name_ref.value) + .into_iter() + .map(|s| s.to_nav(db)) + .collect(); + Approximate(navs) +} + +pub(crate) fn name_definition( + db: &RootDatabase, + name: Source<&ast::Name>, +) -> Option> { + let parent = name.value.syntax().parent()?; + + if let Some(module) = ast::Module::cast(parent.clone()) { + if module.has_semi() { + let src = name.with_value(module); + if let Some(child_module) = hir::Module::from_declaration(db, src) { + let nav = child_module.to_nav(db); + return Some(vec![nav]); + } + } + } + + if let Some(nav) = named_target(db, name.with_value(&parent)) { + return Some(vec![nav]); + } + + None +} + +fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option { + match_ast! 
{ + match (node.value) { + ast::StructDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::EnumDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::EnumVariant(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::FnDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::TypeAliasDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::ConstDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::StaticDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::TraitDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::RecordFieldDef(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::Module(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + it.short_label(), + )) + }, + ast::MacroCall(it) => { + Some(NavigationTarget::from_named( + db, + node.with_value(&it), + it.doc_comment_text(), + None, + )) + }, + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use test_utils::covers; + + use crate::mock_analysis::analysis_and_position; + + fn check_goto(fixture: &str, expected: &str) { + let (analysis, pos) = analysis_and_position(fixture); + + let mut navs = analysis.goto_definition(pos).unwrap().unwrap().info; + assert_eq!(navs.len(), 1); + let nav = navs.pop().unwrap(); + nav.assert_match(expected); + } + + #[test] + fn goto_definition_works_in_items() { + check_goto( + " + //- /lib.rs + struct Foo; + enum E { X(Foo<|>) } + ", + "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", + ); + } + + #[test] + fn goto_definition_resolves_correct_name() { + check_goto( + " + //- /lib.rs + use a::Foo; + mod a; + mod b; + enum E { X(Foo<|>) } + //- /a.rs + struct Foo; + //- /b.rs + struct Foo; + ", + "Foo STRUCT_DEF FileId(2) [0; 11) [7; 10)", + ); + } + + #[test] + fn goto_definition_works_for_module_declaration() { + check_goto( + " + //- /lib.rs + mod <|>foo; + //- /foo.rs + // empty + ", + "foo SOURCE_FILE FileId(2) [0; 10)", + ); + + check_goto( + " + //- /lib.rs + mod <|>foo; + //- /foo/mod.rs + // empty + ", + "foo SOURCE_FILE FileId(2) [0; 10)", + ); + } + + #[test] + fn goto_definition_works_for_macros() { + covers!(goto_definition_works_for_macros); + check_goto( + " + //- /lib.rs + macro_rules! foo { + () => { + {} + }; + } + + fn bar() { + <|>foo!(); + } + ", + "foo MACRO_CALL FileId(1) [0; 50) [13; 16)", + ); + } + + #[test] + fn goto_definition_works_for_macros_from_other_crates() { + covers!(goto_definition_works_for_macros); + check_goto( + " + //- /lib.rs + use foo::foo; + fn bar() { + <|>foo!(); + } + + //- /foo/lib.rs + #[macro_export] + macro_rules! 
foo { + () => { + {} + }; + } + ", + "foo MACRO_CALL FileId(2) [0; 66) [29; 32)", + ); + } + + #[test] + fn goto_definition_works_for_macros_in_use_tree() { + check_goto( + " + //- /lib.rs + use foo::foo<|>; + + //- /foo/lib.rs + #[macro_export] + macro_rules! foo { + () => { + {} + }; + } + ", + "foo MACRO_CALL FileId(2) [0; 66) [29; 32)", + ); + } + + #[test] + fn goto_definition_works_for_macro_defined_fn_with_arg() { + check_goto( + " + //- /lib.rs + macro_rules! define_fn { + ($name:ident) => (fn $name() {}) + } + + define_fn!( + foo + ) + + fn bar() { + <|>foo(); + } + ", + "foo FN_DEF FileId(1) [80; 83) [80; 83)", + ); + } + + #[test] + fn goto_definition_works_for_macro_defined_fn_no_arg() { + check_goto( + " + //- /lib.rs + macro_rules! define_fn { + () => (fn foo() {}) + } + + define_fn!(); + + fn bar() { + <|>foo(); + } + ", + "foo FN_DEF FileId(1) [39; 42) [39; 42)", + ); + } + + #[test] + fn goto_definition_works_for_methods() { + covers!(goto_definition_works_for_methods); + check_goto( + " + //- /lib.rs + struct Foo; + impl Foo { + fn frobnicate(&self) { } + } + + fn bar(foo: &Foo) { + foo.frobnicate<|>(); + } + ", + "frobnicate FN_DEF FileId(1) [27; 52) [30; 40)", + ); + } + + #[test] + fn goto_definition_works_for_fields() { + covers!(goto_definition_works_for_fields); + check_goto( + " + //- /lib.rs + struct Foo { + spam: u32, + } + + fn bar(foo: &Foo) { + foo.spam<|>; + } + ", + "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", + ); + } + + #[test] + fn goto_definition_works_for_record_fields() { + covers!(goto_definition_works_for_record_fields); + check_goto( + " + //- /lib.rs + struct Foo { + spam: u32, + } + + fn bar() -> Foo { + Foo { + spam<|>: 0, + } + } + ", + "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", + ); + } + + #[test] + fn goto_definition_works_for_ufcs_inherent_methods() { + check_goto( + " + //- /lib.rs + struct Foo; + impl Foo { + fn frobnicate() { } + } + + fn bar(foo: &Foo) { + Foo::frobnicate<|>(); + } + ", + "frobnicate FN_DEF FileId(1) [27; 47) [30; 40)", + ); + } + + #[test] + fn goto_definition_works_for_ufcs_trait_methods_through_traits() { + check_goto( + " + //- /lib.rs + trait Foo { + fn frobnicate(); + } + + fn bar() { + Foo::frobnicate<|>(); + } + ", + "frobnicate FN_DEF FileId(1) [16; 32) [19; 29)", + ); + } + + #[test] + fn goto_definition_works_for_ufcs_trait_methods_through_self() { + check_goto( + " + //- /lib.rs + struct Foo; + trait Trait { + fn frobnicate(); + } + impl Trait for Foo {} + + fn bar() { + Foo::frobnicate<|>(); + } + ", + "frobnicate FN_DEF FileId(1) [30; 46) [33; 43)", + ); + } + + #[test] + fn goto_definition_on_self() { + check_goto( + " + //- /lib.rs + struct Foo; + impl Foo { + pub fn new() -> Self { + Self<|> {} + } + } + ", + "impl IMPL_BLOCK FileId(1) [12; 73)", + ); + + check_goto( + " + //- /lib.rs + struct Foo; + impl Foo { + pub fn new() -> Self<|> { + Self {} + } + } + ", + "impl IMPL_BLOCK FileId(1) [12; 73)", + ); + + check_goto( + " + //- /lib.rs + enum Foo { A } + impl Foo { + pub fn new() -> Self<|> { + Foo::A + } + } + ", + "impl IMPL_BLOCK FileId(1) [15; 75)", + ); + + check_goto( + " + //- /lib.rs + enum Foo { A } + impl Foo { + pub fn thing(a: &Self<|>) { + } + } + ", + "impl IMPL_BLOCK FileId(1) [15; 62)", + ); + } + + #[test] + fn goto_definition_on_self_in_trait_impl() { + check_goto( + " + //- /lib.rs + struct Foo; + trait Make { + fn new() -> Self; + } + impl Make for Foo { + fn new() -> Self { + Self<|> {} + } + } + ", + "impl IMPL_BLOCK FileId(1) [49; 115)", + ); + + check_goto( + 
" + //- /lib.rs + struct Foo; + trait Make { + fn new() -> Self; + } + impl Make for Foo { + fn new() -> Self<|> { + Self {} + } + } + ", + "impl IMPL_BLOCK FileId(1) [49; 115)", + ); + } + + #[test] + fn goto_definition_works_when_used_on_definition_name_itself() { + check_goto( + " + //- /lib.rs + struct Foo<|> { value: u32 } + ", + "Foo STRUCT_DEF FileId(1) [0; 25) [7; 10)", + ); + + check_goto( + r#" + //- /lib.rs + struct Foo { + field<|>: string, + } + "#, + "field RECORD_FIELD_DEF FileId(1) [17; 30) [17; 22)", + ); + + check_goto( + " + //- /lib.rs + fn foo_test<|>() { + } + ", + "foo_test FN_DEF FileId(1) [0; 17) [3; 11)", + ); + + check_goto( + " + //- /lib.rs + enum Foo<|> { + Variant, + } + ", + "Foo ENUM_DEF FileId(1) [0; 25) [5; 8)", + ); + + check_goto( + " + //- /lib.rs + enum Foo { + Variant1, + Variant2<|>, + Variant3, + } + ", + "Variant2 ENUM_VARIANT FileId(1) [29; 37) [29; 37)", + ); + + check_goto( + r#" + //- /lib.rs + static inner<|>: &str = ""; + "#, + "inner STATIC_DEF FileId(1) [0; 24) [7; 12)", + ); + + check_goto( + r#" + //- /lib.rs + const inner<|>: &str = ""; + "#, + "inner CONST_DEF FileId(1) [0; 23) [6; 11)", + ); + + check_goto( + r#" + //- /lib.rs + type Thing<|> = Option<()>; + "#, + "Thing TYPE_ALIAS_DEF FileId(1) [0; 24) [5; 10)", + ); + + check_goto( + r#" + //- /lib.rs + trait Foo<|> { + } + "#, + "Foo TRAIT_DEF FileId(1) [0; 13) [6; 9)", + ); + + check_goto( + r#" + //- /lib.rs + mod bar<|> { + } + "#, + "bar MODULE FileId(1) [0; 11) [4; 7)", + ); + } + + #[test] + fn goto_from_macro() { + check_goto( + " + //- /lib.rs + macro_rules! id { + ($($tt:tt)*) => { $($tt)* } + } + fn foo() {} + id! { + fn bar() { + fo<|>o(); + } + } + ", + "foo FN_DEF FileId(1) [52; 63) [55; 58)", + ); + } +} diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs new file mode 100644 index 000000000..992a08809 --- /dev/null +++ b/crates/ra_ide/src/goto_type_definition.rs @@ -0,0 +1,105 @@ +//! 
FIXME: write short doc here + +use hir::db::AstDatabase; +use ra_syntax::{ast, AstNode}; + +use crate::{ + db::RootDatabase, display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, + RangeInfo, +}; + +pub(crate) fn goto_type_definition( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let file = db.parse_or_expand(position.file_id.into())?; + let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?; + let token = descend_into_macros(db, position.file_id, token); + + let node = token.value.ancestors().find_map(|token| { + token + .ancestors() + .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()) + })?; + + let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None); + + let ty: hir::Type = if let Some(ty) = + ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) + { + ty + } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)) + { + ty + } else { + return None; + }; + + let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; + + let nav = adt_def.to_nav(db); + Some(RangeInfo::new(node.text_range(), vec![nav])) +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::analysis_and_position; + + fn check_goto(fixture: &str, expected: &str) { + let (analysis, pos) = analysis_and_position(fixture); + + let mut navs = analysis.goto_type_definition(pos).unwrap().unwrap().info; + assert_eq!(navs.len(), 1); + let nav = navs.pop().unwrap(); + nav.assert_match(expected); + } + + #[test] + fn goto_type_definition_works_simple() { + check_goto( + " + //- /lib.rs + struct Foo; + fn foo() { + let f: Foo; + f<|> + } + ", + "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", + ); + } + + #[test] + fn goto_type_definition_works_simple_ref() { + check_goto( + " + //- /lib.rs + struct Foo; + fn foo() { + let f: &Foo; + f<|> + } + ", + "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", + ); + } + + #[test] + fn goto_type_definition_works_through_macro() { + check_goto( + " + //- /lib.rs + macro_rules! id { + ($($tt:tt)*) => { $($tt)* } + } + struct Foo {} + id! { + fn bar() { + let f<|> = Foo {}; + } + } + ", + "Foo STRUCT_DEF FileId(1) [52; 65) [59; 62)", + ); + } +} diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs new file mode 100644 index 000000000..260a7b869 --- /dev/null +++ b/crates/ra_ide/src/hover.rs @@ -0,0 +1,730 @@ +//! 
FIXME: write short doc here + +use hir::{db::AstDatabase, Adt, HasSource, HirDisplay}; +use ra_db::SourceDatabase; +use ra_syntax::{ + algo::find_covering_element, + ast::{self, DocCommentsOwner}, + match_ast, AstNode, +}; + +use crate::{ + db::RootDatabase, + display::{ + description_from_symbol, docs_from_symbol, macro_label, rust_code_markup, + rust_code_markup_with_doc, ShortLabel, + }, + expand::descend_into_macros, + references::{classify_name, classify_name_ref, NameKind, NameKind::*}, + FilePosition, FileRange, RangeInfo, +}; + +/// Contains the results when hovering over an item +#[derive(Debug, Clone)] +pub struct HoverResult { + results: Vec, + exact: bool, +} + +impl Default for HoverResult { + fn default() -> Self { + HoverResult::new() + } +} + +impl HoverResult { + pub fn new() -> HoverResult { + HoverResult { + results: Vec::new(), + // We assume exact by default + exact: true, + } + } + + pub fn extend(&mut self, item: Option) { + self.results.extend(item); + } + + pub fn is_exact(&self) -> bool { + self.exact + } + + pub fn is_empty(&self) -> bool { + self.results.is_empty() + } + + pub fn len(&self) -> usize { + self.results.len() + } + + pub fn first(&self) -> Option<&str> { + self.results.first().map(String::as_str) + } + + pub fn results(&self) -> &[String] { + &self.results + } + + /// Returns the results converted into markup + /// for displaying in a UI + pub fn to_markup(&self) -> String { + let mut markup = if !self.exact { + let mut msg = String::from("Failed to exactly resolve the symbol. This is probably because rust_analyzer does not yet support traits."); + if !self.results.is_empty() { + msg.push_str(" \nThese items were found instead:"); + } + msg.push_str("\n\n---\n"); + msg + } else { + String::new() + }; + + markup.push_str(&self.results.join("\n\n---\n")); + + markup + } +} + +fn hover_text(docs: Option, desc: Option) -> Option { + match (desc, docs) { + (Some(desc), docs) => Some(rust_code_markup_with_doc(desc, docs)), + (None, Some(docs)) => Some(docs), + _ => None, + } +} + +fn hover_text_from_name_kind( + db: &RootDatabase, + name_kind: NameKind, + no_fallback: &mut bool, +) -> Option { + return match name_kind { + Macro(it) => { + let src = it.source(db); + hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value))) + } + Field(it) => { + let src = it.source(db); + match src.value { + hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()), + _ => None, + } + } + AssocItem(it) => match it { + hir::AssocItem::Function(it) => from_def_source(db, it), + hir::AssocItem::Const(it) => from_def_source(db, it), + hir::AssocItem::TypeAlias(it) => from_def_source(db, it), + }, + Def(it) => match it { + hir::ModuleDef::Module(it) => match it.definition_source(db).value { + hir::ModuleSource::Module(it) => { + hover_text(it.doc_comment_text(), it.short_label()) + } + _ => None, + }, + hir::ModuleDef::Function(it) => from_def_source(db, it), + hir::ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it), + hir::ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it), + hir::ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it), + hir::ModuleDef::EnumVariant(it) => from_def_source(db, it), + hir::ModuleDef::Const(it) => from_def_source(db, it), + hir::ModuleDef::Static(it) => from_def_source(db, it), + hir::ModuleDef::Trait(it) => from_def_source(db, it), + hir::ModuleDef::TypeAlias(it) => from_def_source(db, it), + hir::ModuleDef::BuiltinType(it) => Some(it.to_string()), + }, + Local(_) => { + // Hover for these 
shows type names + *no_fallback = true; + None + } + GenericParam(_) | SelfType(_) => { + // FIXME: Hover for generic param + None + } + }; + + fn from_def_source(db: &RootDatabase, def: D) -> Option + where + D: HasSource, + A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, + { + let src = def.source(db); + hover_text(src.value.doc_comment_text(), src.value.short_label()) + } +} + +pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option> { + let file = db.parse_or_expand(position.file_id.into())?; + let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?; + let token = descend_into_macros(db, position.file_id, token); + + let mut res = HoverResult::new(); + + let mut range = match_ast! { + match (token.value.parent()) { + ast::NameRef(name_ref) => { + let mut no_fallback = false; + if let Some(name_kind) = + classify_name_ref(db, token.with_value(&name_ref)).map(|d| d.kind) + { + res.extend(hover_text_from_name_kind(db, name_kind, &mut no_fallback)) + } + + if res.is_empty() && !no_fallback { + // Fallback index based approach: + let symbols = crate::symbol_index::index_resolve(db, &name_ref); + for sym in symbols { + let docs = docs_from_symbol(db, &sym); + let desc = description_from_symbol(db, &sym); + res.extend(hover_text(docs, desc)); + } + } + + if !res.is_empty() { + Some(name_ref.syntax().text_range()) + } else { + None + } + }, + ast::Name(name) => { + if let Some(name_kind) = classify_name(db, token.with_value(&name)).map(|d| d.kind) { + res.extend(hover_text_from_name_kind(db, name_kind, &mut true)); + } + + if !res.is_empty() { + Some(name.syntax().text_range()) + } else { + None + } + }, + _ => None, + } + }; + + if range.is_none() { + let node = token.value.ancestors().find(|n| { + ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some() + })?; + let frange = FileRange { file_id: position.file_id, range: node.text_range() }; + res.extend(type_of(db, frange).map(rust_code_markup)); + range = Some(node.text_range()); + }; + + let range = range?; + if res.is_empty() { + return None; + } + Some(RangeInfo::new(range, res)) +} + +pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option { + let parse = db.parse(frange.file_id); + let leaf_node = find_covering_element(parse.tree().syntax(), frange.range); + // if we picked identifier, expand to pattern/expression + let node = leaf_node + .ancestors() + .take_while(|it| it.text_range() == leaf_node.text_range()) + .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; + let analyzer = + hir::SourceAnalyzer::new(db, hir::Source::new(frange.file_id.into(), &node), None); + let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) + { + ty + } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) { + ty + } else { + return None; + }; + Some(ty.display(db).to_string()) +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::{ + analysis_and_position, single_file_with_position, single_file_with_range, + }; + use ra_syntax::TextRange; + + fn trim_markup(s: &str) -> &str { + s.trim_start_matches("```rust\n").trim_end_matches("\n```") + } + + fn trim_markup_opt(s: Option<&str>) -> Option<&str> { + s.map(trim_markup) + } + + fn check_hover_result(fixture: &str, expected: &[&str]) { + let (analysis, position) = analysis_and_position(fixture); + let hover = analysis.hover(position).unwrap().unwrap(); + let mut results = 
Vec::from(hover.info.results()); + results.sort(); + + for (markup, expected) in + results.iter().zip(expected.iter().chain(std::iter::repeat(&""))) + { + assert_eq!(trim_markup(&markup), *expected); + } + + assert_eq!(hover.info.len(), expected.len()); + } + + #[test] + fn hover_shows_type_of_an_expression() { + let (analysis, position) = single_file_with_position( + " + pub fn foo() -> u32 { 1 } + + fn main() { + let foo_test = foo()<|>; + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into())); + assert_eq!(trim_markup_opt(hover.info.first()), Some("u32")); + } + + #[test] + fn hover_shows_fn_signature() { + // Single file with result + check_hover_result( + r#" + //- /main.rs + pub fn foo() -> u32 { 1 } + + fn main() { + let foo_test = fo<|>o(); + } + "#, + &["pub fn foo() -> u32"], + ); + + // Multiple results + check_hover_result( + r#" + //- /a.rs + pub fn foo() -> u32 { 1 } + + //- /b.rs + pub fn foo() -> &str { "" } + + //- /c.rs + pub fn foo(a: u32, b: u32) {} + + //- /main.rs + mod a; + mod b; + mod c; + + fn main() { + let foo_test = fo<|>o(); + } + "#, + &["pub fn foo() -> &str", "pub fn foo() -> u32", "pub fn foo(a: u32, b: u32)"], + ); + } + + #[test] + fn hover_shows_fn_signature_with_type_params() { + check_hover_result( + r#" + //- /main.rs + pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str { } + + fn main() { + let foo_test = fo<|>o(); + } + "#, + &["pub fn foo<'a, T: AsRef>(b: &'a T) -> &'a str"], + ); + } + + #[test] + fn hover_shows_fn_signature_on_fn_name() { + check_hover_result( + r#" + //- /main.rs + pub fn foo<|>(a: u32, b: u32) -> u32 {} + + fn main() { + } + "#, + &["pub fn foo(a: u32, b: u32) -> u32"], + ); + } + + #[test] + fn hover_shows_struct_field_info() { + // Hovering over the field when instantiating + check_hover_result( + r#" + //- /main.rs + struct Foo { + field_a: u32, + } + + fn main() { + let foo = Foo { + field_a<|>: 0, + }; + } + "#, + &["field_a: u32"], + ); + + // Hovering over the field in the definition + check_hover_result( + r#" + //- /main.rs + struct Foo { + field_a<|>: u32, + } + + fn main() { + let foo = Foo { + field_a: 0, + }; + } + "#, + &["field_a: u32"], + ); + } + + #[test] + fn hover_const_static() { + check_hover_result( + r#" + //- /main.rs + const foo<|>: u32 = 0; + "#, + &["const foo: u32"], + ); + + check_hover_result( + r#" + //- /main.rs + static foo<|>: u32 = 0; + "#, + &["static foo: u32"], + ); + } + + #[test] + fn hover_some() { + let (analysis, position) = single_file_with_position( + " + enum Option { Some(T) } + use Option::Some; + + fn main() { + So<|>me(12); + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("Some")); + + let (analysis, position) = single_file_with_position( + " + enum Option { Some(T) } + use Option::Some; + + fn main() { + let b<|>ar = Some(12); + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("Option")); + } + + #[test] + fn hover_enum_variant() { + check_hover_result( + r#" + //- /main.rs + enum Option { + /// The None variant + Non<|>e + } + "#, + &[" +None +``` + +The None variant + " + .trim()], + ); + + check_hover_result( + r#" + //- /main.rs + enum Option { + /// The Some variant + Some(T) + } + fn main() { + let s = Option::Som<|>e(12); + } + "#, + &[" +Some +``` + +The Some variant + " + .trim()], + ); + } + + #[test] + fn hover_for_local_variable() { + 
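+        // Hovering over a *use* of a local has no definition text to show: the
+        // `Local(_)` branch above returns nothing, so hover falls back to the type
+        // of the enclosing expression and reports the inferred type (`i32` here).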
let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }"); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("i32")); + } + + #[test] + fn hover_for_local_variable_pat() { + let (analysis, position) = single_file_with_position("fn func(fo<|>o: i32) {}"); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("i32")); + } + + #[test] + fn hover_local_var_edge() { + let (analysis, position) = single_file_with_position( + " +fn func(foo: i32) { if true { <|>foo; }; } +", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("i32")); + } + + #[test] + fn test_type_of_for_function() { + let (analysis, range) = single_file_with_range( + " + pub fn foo() -> u32 { 1 }; + + fn main() { + let foo_test = <|>foo()<|>; + } + ", + ); + + let type_name = analysis.type_of(range).unwrap().unwrap(); + assert_eq!("u32", &type_name); + } + + #[test] + fn test_type_of_for_expr() { + let (analysis, range) = single_file_with_range( + " + fn main() { + let foo: usize = 1; + let bar = <|>1 + foo<|>; + } + ", + ); + + let type_name = analysis.type_of(range).unwrap().unwrap(); + assert_eq!("usize", &type_name); + } + + #[test] + fn test_hover_infer_associated_method_result() { + let (analysis, position) = single_file_with_position( + " + struct Thing { x: u32 } + + impl Thing { + fn new() -> Thing { + Thing { x: 0 } + } + } + + fn main() { + let foo_<|>test = Thing::new(); + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("Thing")); + } + + #[test] + fn test_hover_infer_associated_method_exact() { + let (analysis, position) = single_file_with_position( + " + struct Thing { x: u32 } + + impl Thing { + fn new() -> Thing { + Thing { x: 0 } + } + } + + fn main() { + let foo_test = Thing::new<|>(); + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("fn new() -> Thing")); + assert_eq!(hover.info.is_exact(), true); + } + + #[test] + fn test_hover_infer_associated_const_in_pattern() { + let (analysis, position) = single_file_with_position( + " + struct X; + impl X { + const C: u32 = 1; + } + + fn main() { + match 1 { + X::C<|> => {}, + 2 => {}, + _ => {} + }; + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("const C: u32")); + assert_eq!(hover.info.is_exact(), true); + } + + #[test] + fn test_hover_self() { + let (analysis, position) = single_file_with_position( + " + struct Thing { x: u32 } + impl Thing { + fn new() -> Self { + Self<|> { x: 0 } + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("Thing")); + assert_eq!(hover.info.is_exact(), true); + + /* FIXME: revive these tests + let (analysis, position) = single_file_with_position( + " + struct Thing { x: u32 } + impl Thing { + fn new() -> Self<|> { + Self { x: 0 } + } + } + ", + ); + + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("Thing")); + assert_eq!(hover.info.is_exact(), true); + + let (analysis, position) = single_file_with_position( + " + enum Thing { A } + impl Thing { + pub fn new() -> Self<|> { + Thing::A + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); 
+ assert_eq!(trim_markup_opt(hover.info.first()), Some("enum Thing")); + assert_eq!(hover.info.is_exact(), true); + + let (analysis, position) = single_file_with_position( + " + enum Thing { A } + impl Thing { + pub fn thing(a: Self<|>) { + } + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("enum Thing")); + assert_eq!(hover.info.is_exact(), true); + */ + } + + #[test] + fn test_hover_shadowing_pat() { + let (analysis, position) = single_file_with_position( + " + fn x() {} + + fn y() { + let x = 0i32; + x<|>; + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("i32")); + assert_eq!(hover.info.is_exact(), true); + } + + #[test] + fn test_hover_macro_invocation() { + let (analysis, position) = single_file_with_position( + " + macro_rules! foo { + () => {} + } + + fn f() { + fo<|>o!(); + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("macro_rules! foo")); + assert_eq!(hover.info.is_exact(), true); + } + + #[test] + fn test_hover_tuple_field() { + let (analysis, position) = single_file_with_position( + " + struct TS(String, i32<|>); + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(trim_markup_opt(hover.info.first()), Some("i32")); + assert_eq!(hover.info.is_exact(), true); + } + + #[test] + fn test_hover_through_macro() { + check_hover_result( + " + //- /lib.rs + macro_rules! id { + ($($tt:tt)*) => { $($tt)* } + } + fn foo() {} + id! { + fn bar() { + fo<|>o(); + } + } + ", + &["fn foo()"], + ); + } +} diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs new file mode 100644 index 000000000..aa480e399 --- /dev/null +++ b/crates/ra_ide/src/impls.rs @@ -0,0 +1,206 @@ +//! 
FIXME: write short doc here + +use hir::{FromSource, ImplBlock}; +use ra_db::SourceDatabase; +use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; + +use crate::{db::RootDatabase, display::ToNav, FilePosition, NavigationTarget, RangeInfo}; + +pub(crate) fn goto_implementation( + db: &RootDatabase, + position: FilePosition, +) -> Option>> { + let parse = db.parse(position.file_id); + let syntax = parse.tree().syntax().clone(); + + let src = hir::ModuleSource::from_position(db, position); + let module = hir::Module::from_definition( + db, + hir::Source { file_id: position.file_id.into(), value: src }, + )?; + + if let Some(nominal_def) = find_node_at_offset::(&syntax, position.offset) { + return Some(RangeInfo::new( + nominal_def.syntax().text_range(), + impls_for_def(db, position, &nominal_def, module)?, + )); + } else if let Some(trait_def) = find_node_at_offset::(&syntax, position.offset) { + return Some(RangeInfo::new( + trait_def.syntax().text_range(), + impls_for_trait(db, position, &trait_def, module)?, + )); + } + + None +} + +fn impls_for_def( + db: &RootDatabase, + position: FilePosition, + node: &ast::NominalDef, + module: hir::Module, +) -> Option> { + let ty = match node { + ast::NominalDef::StructDef(def) => { + let src = hir::Source { file_id: position.file_id.into(), value: def.clone() }; + hir::Struct::from_source(db, src)?.ty(db) + } + ast::NominalDef::EnumDef(def) => { + let src = hir::Source { file_id: position.file_id.into(), value: def.clone() }; + hir::Enum::from_source(db, src)?.ty(db) + } + ast::NominalDef::UnionDef(def) => { + let src = hir::Source { file_id: position.file_id.into(), value: def.clone() }; + hir::Union::from_source(db, src)?.ty(db) + } + }; + + let krate = module.krate(); + let impls = ImplBlock::all_in_crate(db, krate); + + Some( + impls + .into_iter() + .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(db))) + .map(|imp| imp.to_nav(db)) + .collect(), + ) +} + +fn impls_for_trait( + db: &RootDatabase, + position: FilePosition, + node: &ast::TraitDef, + module: hir::Module, +) -> Option> { + let src = hir::Source { file_id: position.file_id.into(), value: node.clone() }; + let tr = hir::Trait::from_source(db, src)?; + + let krate = module.krate(); + let impls = ImplBlock::for_trait(db, krate, tr); + + Some(impls.into_iter().map(|imp| imp.to_nav(db)).collect()) +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::analysis_and_position; + + fn check_goto(fixture: &str, expected: &[&str]) { + let (analysis, pos) = analysis_and_position(fixture); + + let mut navs = analysis.goto_implementation(pos).unwrap().unwrap().info; + assert_eq!(navs.len(), expected.len()); + navs.sort_by_key(|nav| (nav.file_id(), nav.full_range().start())); + navs.into_iter().enumerate().for_each(|(i, nav)| nav.assert_match(expected[i])); + } + + #[test] + fn goto_implementation_works() { + check_goto( + " + //- /lib.rs + struct Foo<|>; + impl Foo {} + ", + &["impl IMPL_BLOCK FileId(1) [12; 23)"], + ); + } + + #[test] + fn goto_implementation_works_multiple_blocks() { + check_goto( + " + //- /lib.rs + struct Foo<|>; + impl Foo {} + impl Foo {} + ", + &["impl IMPL_BLOCK FileId(1) [12; 23)", "impl IMPL_BLOCK FileId(1) [24; 35)"], + ); + } + + #[test] + fn goto_implementation_works_multiple_mods() { + check_goto( + " + //- /lib.rs + struct Foo<|>; + mod a { + impl super::Foo {} + } + mod b { + impl super::Foo {} + } + ", + &["impl IMPL_BLOCK FileId(1) [24; 42)", "impl IMPL_BLOCK FileId(1) [57; 75)"], + ); + } + + #[test] + fn 
goto_implementation_works_multiple_files() { + check_goto( + " + //- /lib.rs + struct Foo<|>; + mod a; + mod b; + //- /a.rs + impl crate::Foo {} + //- /b.rs + impl crate::Foo {} + ", + &["impl IMPL_BLOCK FileId(2) [0; 18)", "impl IMPL_BLOCK FileId(3) [0; 18)"], + ); + } + + #[test] + fn goto_implementation_for_trait() { + check_goto( + " + //- /lib.rs + trait T<|> {} + struct Foo; + impl T for Foo {} + ", + &["impl IMPL_BLOCK FileId(1) [23; 40)"], + ); + } + + #[test] + fn goto_implementation_for_trait_multiple_files() { + check_goto( + " + //- /lib.rs + trait T<|> {}; + struct Foo; + mod a; + mod b; + //- /a.rs + impl crate::T for crate::Foo {} + //- /b.rs + impl crate::T for crate::Foo {} + ", + &["impl IMPL_BLOCK FileId(2) [0; 31)", "impl IMPL_BLOCK FileId(3) [0; 31)"], + ); + } + + #[test] + fn goto_implementation_all_impls() { + check_goto( + " + //- /lib.rs + trait T {} + struct Foo<|>; + impl Foo {} + impl T for Foo {} + impl T for &Foo {} + ", + &[ + "impl IMPL_BLOCK FileId(1) [23; 34)", + "impl IMPL_BLOCK FileId(1) [35; 52)", + "impl IMPL_BLOCK FileId(1) [53; 71)", + ], + ); + } +} diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs new file mode 100644 index 000000000..45149bf0c --- /dev/null +++ b/crates/ra_ide/src/inlay_hints.rs @@ -0,0 +1,543 @@ +//! FIXME: write short doc here + +use crate::{db::RootDatabase, FileId}; +use hir::{HirDisplay, SourceAnalyzer}; +use ra_syntax::{ + ast::{self, AstNode, TypeAscriptionOwner}, + match_ast, SmolStr, SourceFile, SyntaxKind, SyntaxNode, TextRange, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum InlayKind { + TypeHint, +} + +#[derive(Debug)] +pub struct InlayHint { + pub range: TextRange, + pub kind: InlayKind, + pub label: SmolStr, +} + +pub(crate) fn inlay_hints( + db: &RootDatabase, + file_id: FileId, + file: &SourceFile, + max_inlay_hint_length: Option, +) -> Vec { + file.syntax() + .descendants() + .map(|node| get_inlay_hints(db, file_id, &node, max_inlay_hint_length).unwrap_or_default()) + .flatten() + .collect() +} + +fn get_inlay_hints( + db: &RootDatabase, + file_id: FileId, + node: &SyntaxNode, + max_inlay_hint_length: Option, +) -> Option> { + let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None); + match_ast! 
{ + match node { + ast::LetStmt(it) => { + if it.ascribed_type().is_some() { + return None; + } + let pat = it.pat()?; + Some(get_pat_type_hints(db, &analyzer, pat, false, max_inlay_hint_length)) + }, + ast::LambdaExpr(it) => { + it.param_list().map(|param_list| { + param_list + .params() + .filter(|closure_param| closure_param.ascribed_type().is_none()) + .filter_map(|closure_param| closure_param.pat()) + .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, false, max_inlay_hint_length)) + .flatten() + .collect() + }) + }, + ast::ForExpr(it) => { + let pat = it.pat()?; + Some(get_pat_type_hints(db, &analyzer, pat, false, max_inlay_hint_length)) + }, + ast::IfExpr(it) => { + let pat = it.condition()?.pat()?; + Some(get_pat_type_hints(db, &analyzer, pat, true, max_inlay_hint_length)) + }, + ast::WhileExpr(it) => { + let pat = it.condition()?.pat()?; + Some(get_pat_type_hints(db, &analyzer, pat, true, max_inlay_hint_length)) + }, + ast::MatchArmList(it) => { + Some( + it + .arms() + .map(|match_arm| match_arm.pats()) + .flatten() + .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, true, max_inlay_hint_length)) + .flatten() + .collect(), + ) + }, + _ => None, + } + } +} + +fn get_pat_type_hints( + db: &RootDatabase, + analyzer: &SourceAnalyzer, + root_pat: ast::Pat, + skip_root_pat_hint: bool, + max_inlay_hint_length: Option, +) -> Vec { + let original_pat = &root_pat.clone(); + + get_leaf_pats(root_pat) + .into_iter() + .filter(|pat| !skip_root_pat_hint || pat != original_pat) + .filter_map(|pat| { + let ty = analyzer.type_of_pat(db, &pat)?; + if ty.is_unknown() { + return None; + } + Some((pat.syntax().text_range(), ty)) + }) + .map(|(range, pat_type)| InlayHint { + range, + kind: InlayKind::TypeHint, + label: pat_type.display_truncated(db, max_inlay_hint_length).to_string().into(), + }) + .collect() +} + +fn get_leaf_pats(root_pat: ast::Pat) -> Vec { + let mut pats_to_process = std::collections::VecDeque::::new(); + pats_to_process.push_back(root_pat); + + let mut leaf_pats = Vec::new(); + + while let Some(maybe_leaf_pat) = pats_to_process.pop_front() { + match &maybe_leaf_pat { + ast::Pat::BindPat(bind_pat) => { + if let Some(pat) = bind_pat.pat() { + pats_to_process.push_back(pat); + } else { + leaf_pats.push(maybe_leaf_pat); + } + } + ast::Pat::TuplePat(tuple_pat) => { + for arg_pat in tuple_pat.args() { + pats_to_process.push_back(arg_pat); + } + } + ast::Pat::RecordPat(record_pat) => { + if let Some(pat_list) = record_pat.record_field_pat_list() { + pats_to_process.extend( + pat_list + .record_field_pats() + .filter_map(|record_field_pat| { + record_field_pat + .pat() + .filter(|pat| pat.syntax().kind() != SyntaxKind::BIND_PAT) + }) + .chain(pat_list.bind_pats().map(|bind_pat| { + bind_pat.pat().unwrap_or_else(|| ast::Pat::from(bind_pat)) + })), + ); + } + } + ast::Pat::TupleStructPat(tuple_struct_pat) => { + for arg_pat in tuple_struct_pat.args() { + pats_to_process.push_back(arg_pat); + } + } + _ => (), + } + } + leaf_pats +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::single_file; + use insta::assert_debug_snapshot; + + #[test] + fn let_statement() { + let (analysis, file_id) = single_file( + r#" +#[derive(PartialEq)] +enum CustomOption { + None, + Some(T), +} + +#[derive(PartialEq)] +struct Test { + a: CustomOption, + b: u8, +} + +fn main() { + struct InnerStruct {} + + let test = 54; + let test: i32 = 33; + let mut test = 33; + let _ = 22; + let test = "test"; + let test = InnerStruct {}; + + let test = vec![222]; + let test: Vec<_> = 
(0..3).collect(); + let test = (0..3).collect::>(); + let test = (0..3).collect::>(); + + let mut test = Vec::new(); + test.push(333); + + let test = (42, 'a'); + let (a, (b, c, (d, e), f)) = (2, (3, 4, (6.6, 7.7), 5)); +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###" + [ + InlayHint { + range: [193; 197), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [236; 244), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [275; 279), + kind: TypeHint, + label: "&str", + }, + InlayHint { + range: [539; 543), + kind: TypeHint, + label: "(i32, char)", + }, + InlayHint { + range: [566; 567), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [570; 571), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [573; 574), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [584; 585), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [577; 578), + kind: TypeHint, + label: "f64", + }, + InlayHint { + range: [580; 581), + kind: TypeHint, + label: "f64", + }, + ] + "### + ); + } + + #[test] + fn closure_parameter() { + let (analysis, file_id) = single_file( + r#" +fn main() { + let mut start = 0; + (0..2).for_each(|increment| { + start += increment; + }) +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###" + [ + InlayHint { + range: [21; 30), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [57; 66), + kind: TypeHint, + label: "i32", + }, + ] + "### + ); + } + + #[test] + fn for_expression() { + let (analysis, file_id) = single_file( + r#" +fn main() { + let mut start = 0; + for increment in 0..2 { + start += increment; + } +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###" + [ + InlayHint { + range: [21; 30), + kind: TypeHint, + label: "i32", + }, + InlayHint { + range: [44; 53), + kind: TypeHint, + label: "i32", + }, + ] + "### + ); + } + + #[test] + fn if_expr() { + let (analysis, file_id) = single_file( + r#" +#[derive(PartialEq)] +enum CustomOption { + None, + Some(T), +} + +#[derive(PartialEq)] +struct Test { + a: CustomOption, + b: u8, +} + +fn main() { + let test = CustomOption::Some(Test { a: CustomOption::Some(3), b: 1 }); + if let CustomOption::None = &test {}; + if let test = &test {}; + if let CustomOption::Some(test) = &test {}; + if let CustomOption::Some(Test { a, b }) = &test {}; + if let CustomOption::Some(Test { a: x, b: y }) = &test {}; + if let CustomOption::Some(Test { a: CustomOption::Some(x), b: y }) = &test {}; + if let CustomOption::Some(Test { a: CustomOption::None, b: y }) = &test {}; + if let CustomOption::Some(Test { b: y, .. 
}) = &test {}; + + if test == CustomOption::None {} +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###" + [ + InlayHint { + range: [166; 170), + kind: TypeHint, + label: "CustomOption", + }, + InlayHint { + range: [334; 338), + kind: TypeHint, + label: "&Test", + }, + InlayHint { + range: [389; 390), + kind: TypeHint, + label: "&CustomOption", + }, + InlayHint { + range: [392; 393), + kind: TypeHint, + label: "&u8", + }, + InlayHint { + range: [531; 532), + kind: TypeHint, + label: "&u32", + }, + ] + "### + ); + } + + #[test] + fn while_expr() { + let (analysis, file_id) = single_file( + r#" +#[derive(PartialEq)] +enum CustomOption { + None, + Some(T), +} + +#[derive(PartialEq)] +struct Test { + a: CustomOption, + b: u8, +} + +fn main() { + let test = CustomOption::Some(Test { a: CustomOption::Some(3), b: 1 }); + while let CustomOption::None = &test {}; + while let test = &test {}; + while let CustomOption::Some(test) = &test {}; + while let CustomOption::Some(Test { a, b }) = &test {}; + while let CustomOption::Some(Test { a: x, b: y }) = &test {}; + while let CustomOption::Some(Test { a: CustomOption::Some(x), b: y }) = &test {}; + while let CustomOption::Some(Test { a: CustomOption::None, b: y }) = &test {}; + while let CustomOption::Some(Test { b: y, .. }) = &test {}; + + while test == CustomOption::None {} +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###" + [ + InlayHint { + range: [166; 170), + kind: TypeHint, + label: "CustomOption", + }, + InlayHint { + range: [343; 347), + kind: TypeHint, + label: "&Test", + }, + InlayHint { + range: [401; 402), + kind: TypeHint, + label: "&CustomOption", + }, + InlayHint { + range: [404; 405), + kind: TypeHint, + label: "&u8", + }, + InlayHint { + range: [549; 550), + kind: TypeHint, + label: "&u32", + }, + ] + "### + ); + } + + #[test] + fn match_arm_list() { + let (analysis, file_id) = single_file( + r#" +#[derive(PartialEq)] +enum CustomOption { + None, + Some(T), +} + +#[derive(PartialEq)] +struct Test { + a: CustomOption, + b: u8, +} + +fn main() { + match CustomOption::Some(Test { a: CustomOption::Some(3), b: 1 }) { + CustomOption::None => (), + test => (), + CustomOption::Some(test) => (), + CustomOption::Some(Test { a, b }) => (), + CustomOption::Some(Test { a: x, b: y }) => (), + CustomOption::Some(Test { a: CustomOption::Some(x), b: y }) => (), + CustomOption::Some(Test { a: CustomOption::None, b: y }) => (), + CustomOption::Some(Test { b: y, .. 
}) => (), + _ => {} + } +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###" + [ + InlayHint { + range: [311; 315), + kind: TypeHint, + label: "Test", + }, + InlayHint { + range: [358; 359), + kind: TypeHint, + label: "CustomOption", + }, + InlayHint { + range: [361; 362), + kind: TypeHint, + label: "u8", + }, + InlayHint { + range: [484; 485), + kind: TypeHint, + label: "u32", + }, + ] + "### + ); + } + + #[test] + fn hint_truncation() { + let (analysis, file_id) = single_file( + r#" +struct Smol(T); + +struct VeryLongOuterName(T); + +fn main() { + let a = Smol(0u32); + let b = VeryLongOuterName(0usize); + let c = Smol(Smol(0u32)) +}"#, + ); + + assert_debug_snapshot!(analysis.inlay_hints(file_id, Some(8)).unwrap(), @r###" + [ + InlayHint { + range: [74; 75), + kind: TypeHint, + label: "Smol", + }, + InlayHint { + range: [98; 99), + kind: TypeHint, + label: "VeryLongOuterName<…>", + }, + InlayHint { + range: [137; 138), + kind: TypeHint, + label: "Smol>", + }, + ] + "### + ); + } +} diff --git a/crates/ra_ide/src/join_lines.rs b/crates/ra_ide/src/join_lines.rs new file mode 100644 index 000000000..7deeb3494 --- /dev/null +++ b/crates/ra_ide/src/join_lines.rs @@ -0,0 +1,611 @@ +//! FIXME: write short doc here + +use itertools::Itertools; +use ra_fmt::{compute_ws, extract_trivial_expression}; +use ra_syntax::{ + algo::{find_covering_element, non_trivia_sibling}, + ast::{self, AstNode, AstToken}, + Direction, NodeOrToken, SourceFile, + SyntaxKind::{self, WHITESPACE}, + SyntaxNode, SyntaxToken, TextRange, TextUnit, T, +}; +use ra_text_edit::{TextEdit, TextEditBuilder}; + +pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { + let range = if range.is_empty() { + let syntax = file.syntax(); + let text = syntax.text().slice(range.start()..); + let pos = match text.find_char('\n') { + None => return TextEditBuilder::default().finish(), + Some(pos) => pos, + }; + TextRange::offset_len(range.start() + pos, TextUnit::of_char('\n')) + } else { + range + }; + + let node = match find_covering_element(file.syntax(), range) { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), + }; + let mut edit = TextEditBuilder::default(); + for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) { + let range = match range.intersection(&token.text_range()) { + Some(range) => range, + None => continue, + } - token.text_range().start(); + let text = token.text(); + for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { + let pos: TextUnit = (pos as u32).into(); + let off = token.text_range().start() + range.start() + pos; + if !edit.invalidates_offset(off) { + remove_newline(&mut edit, &token, off); + } + } + } + + edit.finish() +} + +fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) { + if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { + // The node is either the first or the last in the file + let suff = &token.text()[TextRange::from_to( + offset - token.text_range().start() + TextUnit::of_char('\n'), + TextUnit::of_str(token.text()), + )]; + let spaces = suff.bytes().take_while(|&b| b == b' ').count(); + + edit.replace(TextRange::offset_len(offset, ((spaces + 1) as u32).into()), " ".to_string()); + return; + } + + // Special case that turns something like: + // + // ``` + // my_function({<|> + // + // }) + // ``` + // + // into `my_function()` + if join_single_expr_block(edit, token).is_some() { + return; + } + 
// ditto for + // + // ``` + // use foo::{<|> + // bar + // }; + // ``` + if join_single_use_tree(edit, token).is_some() { + return; + } + + // The node is between two other nodes + let prev = token.prev_sibling_or_token().unwrap(); + let next = token.next_sibling_or_token().unwrap(); + if is_trailing_comma(prev.kind(), next.kind()) { + // Removes: trailing comma, newline (incl. surrounding whitespace) + edit.delete(TextRange::from_to(prev.text_range().start(), token.text_range().end())); + } else if prev.kind() == T![,] && next.kind() == T!['}'] { + // Removes: comma, newline (incl. surrounding whitespace) + let space = if let Some(left) = prev.prev_sibling_or_token() { + compute_ws(left.kind(), next.kind()) + } else { + " " + }; + edit.replace( + TextRange::from_to(prev.text_range().start(), token.text_range().end()), + space.to_string(), + ); + } else if let (Some(_), Some(next)) = ( + prev.as_token().cloned().and_then(ast::Comment::cast), + next.as_token().cloned().and_then(ast::Comment::cast), + ) { + // Removes: newline (incl. surrounding whitespace), start of the next comment + edit.delete(TextRange::from_to( + token.text_range().start(), + next.syntax().text_range().start() + TextUnit::of_str(next.prefix()), + )); + } else { + // Remove newline but add a computed amount of whitespace characters + edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string()); + } +} + +fn has_comma_after(node: &SyntaxNode) -> bool { + match non_trivia_sibling(node.clone().into(), Direction::Next) { + Some(n) => n.kind() == T![,], + _ => false, + } +} + +fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { + let block = ast::Block::cast(token.parent())?; + let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; + let expr = extract_trivial_expression(&block_expr)?; + + let block_range = block_expr.syntax().text_range(); + let mut buf = expr.syntax().text().to_string(); + + // Match block needs to have a comma after the block + if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) { + if !has_comma_after(match_arm.syntax()) { + buf.push(','); + } + } + + edit.replace(block_range, buf); + + Some(()) +} + +fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(token.parent())?; + let (tree,) = use_tree_list.use_trees().collect_tuple()?; + edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string()); + Some(()) +} + +fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { + match (left, right) { + (T![,], T![')']) | (T![,], T![']']) => true, + _ => false, + } +} + +#[cfg(test)] +mod tests { + use crate::test_utils::{assert_eq_text, check_action, extract_range}; + + use super::*; + + fn check_join_lines(before: &str, after: &str) { + check_action(before, after, |file, offset| { + let range = TextRange::offset_len(offset, 0.into()); + let res = join_lines(file, range); + Some(res) + }) + } + + #[test] + fn test_join_lines_comma() { + check_join_lines( + r" +fn foo() { + <|>foo(1, + ) +} +", + r" +fn foo() { + <|>foo(1) +} +", + ); + } + + #[test] + fn test_join_lines_lambda_block() { + check_join_lines( + r" +pub fn reparse(&self, edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| { + self.full_reparse(edit) + }) +} +", + r" +pub fn reparse(&self, edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) +} +", + 
); + } + + #[test] + fn test_join_lines_block() { + check_join_lines( + r" +fn foo() { + foo(<|>{ + 92 + }) +}", + r" +fn foo() { + foo(<|>92) +}", + ); + } + + #[test] + fn join_lines_adds_comma_for_block_in_match_arm() { + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo(), + Err(v) => v, + } +}", + ); + } + + #[test] + fn join_lines_multiline_in_block() { + check_join_lines( + r" +fn foo() { + match ty { + <|> Some(ty) => { + match ty { + _ => false, + } + } + _ => true, + } +} +", + r" +fn foo() { + match ty { + <|> Some(ty) => match ty { + _ => false, + }, + _ => true, + } +} +", + ); + } + + #[test] + fn join_lines_keeps_comma_for_block_in_match_arm() { + // We already have a comma + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + }, + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo(), + Err(v) => v, + } +}", + ); + + // comma with whitespace between brace and , + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } , + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo() , + Err(v) => v, + } +}", + ); + + // comma with newline between brace and , + check_join_lines( + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>{ + u.foo() + } + , + Err(v) => v, + } +}", + r" +fn foo(e: Result) { + match e { + Ok(u) => <|>u.foo() + , + Err(v) => v, + } +}", + ); + } + + #[test] + fn join_lines_keeps_comma_with_single_arg_tuple() { + // A single arg tuple + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + },); +}", + r" +fn foo() { + let x = (<|>4,); +}", + ); + + // single arg tuple with whitespace between brace and comma + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + } ,); +}", + r" +fn foo() { + let x = (<|>4 ,); +}", + ); + + // single arg tuple with newline between brace and comma + check_join_lines( + r" +fn foo() { + let x = (<|>{ + 4 + } + ,); +}", + r" +fn foo() { + let x = (<|>4 + ,); +}", + ); + } + + #[test] + fn test_join_lines_use_items_left() { + // No space after the '{' + check_join_lines( + r" +<|>use ra_syntax::{ + TextUnit, TextRange, +};", + r" +<|>use ra_syntax::{TextUnit, TextRange, +};", + ); + } + + #[test] + fn test_join_lines_use_items_right() { + // No space after the '}' + check_join_lines( + r" +use ra_syntax::{ +<|> TextUnit, TextRange +};", + r" +use ra_syntax::{ +<|> TextUnit, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_items_right_comma() { + // No space after the '}' + check_join_lines( + r" +use ra_syntax::{ +<|> TextUnit, TextRange, +};", + r" +use ra_syntax::{ +<|> TextUnit, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_tree() { + check_join_lines( + r" +use ra_syntax::{ + algo::<|>{ + find_token_at_offset, + }, + ast, +};", + r" +use ra_syntax::{ + algo::<|>find_token_at_offset, + ast, +};", + ); + } + + #[test] + fn test_join_lines_normal_comments() { + check_join_lines( + r" +fn foo() { + // Hello<|> + // world! +} +", + r" +fn foo() { + // Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_doc_comments() { + check_join_lines( + r" +fn foo() { + /// Hello<|> + /// world! +} +", + r" +fn foo() { + /// Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_mod_comments() { + check_join_lines( + r" +fn foo() { + //! Hello<|> + //! world! +} +", + r" +fn foo() { + //! Hello<|> world! 
+} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_1() { + check_join_lines( + r" +fn foo() { + // Hello<|> + /* world! */ +} +", + r" +fn foo() { + // Hello<|> world! */ +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_2() { + check_join_lines( + r" +fn foo() { + // The<|> + /* quick + brown + fox! */ +} +", + r" +fn foo() { + // The<|> quick + brown + fox! */ +} +", + ); + } + + fn check_join_lines_sel(before: &str, after: &str) { + let (sel, before) = extract_range(before); + let parse = SourceFile::parse(&before); + let result = join_lines(&parse.tree(), sel); + let actual = result.apply(&before); + assert_eq_text!(after, &actual); + } + + #[test] + fn test_join_lines_selection_fn_args() { + check_join_lines_sel( + r" +fn foo() { + <|>foo(1, + 2, + 3, + <|>) +} + ", + r" +fn foo() { + foo(1, 2, 3) +} + ", + ); + } + + #[test] + fn test_join_lines_selection_struct() { + check_join_lines_sel( + r" +struct Foo <|>{ + f: u32, +}<|> + ", + r" +struct Foo { f: u32 } + ", + ); + } + + #[test] + fn test_join_lines_selection_dot_chain() { + check_join_lines_sel( + r" +fn foo() { + join(<|>type_params.type_params() + .filter_map(|it| it.name()) + .map(|it| it.text())<|>) +}", + r" +fn foo() { + join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) +}", + ); + } + + #[test] + fn test_join_lines_selection_lambda_block_body() { + check_join_lines_sel( + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| <|>{ + world.analysis().matching_brace(&file, offset).unwrap_or(offset) + }<|>) + .collect(); +}", + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) + .collect(); +}", + ); + } +} diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs new file mode 100644 index 000000000..d1bff4a76 --- /dev/null +++ b/crates/ra_ide/src/lib.rs @@ -0,0 +1,489 @@ +//! ra_ide crate provides "ide-centric" APIs for the rust-analyzer. That is, +//! it generally operates with files and text ranges, and returns results as +//! Strings, suitable for displaying to the human. +//! +//! What powers this API are the `RootDatabase` struct, which defines a `salsa` +//! database, and the `ra_hir` crate, where majority of the analysis happens. +//! However, IDE specific bits of the analysis (most notably completion) happen +//! in this crate. + +// For proving that RootDatabase is RefUnwindSafe. 
+#![recursion_limit = "128"] + +mod db; +pub mod mock_analysis; +mod symbol_index; +mod change; +mod source_change; +mod feature_flags; + +mod status; +mod completion; +mod runnables; +mod goto_definition; +mod goto_type_definition; +mod extend_selection; +mod hover; +mod call_info; +mod syntax_highlighting; +mod parent_module; +mod references; +mod impls; +mod assists; +mod diagnostics; +mod syntax_tree; +mod folding_ranges; +mod line_index; +mod line_index_utils; +mod join_lines; +mod typing; +mod matching_brace; +mod display; +mod inlay_hints; +mod wasm_shims; +mod expand; +mod expand_macro; + +#[cfg(test)] +mod marks; +#[cfg(test)] +mod test_utils; + +use std::sync::Arc; + +use ra_cfg::CfgOptions; +use ra_db::{ + salsa::{self, ParallelDatabase}, + CheckCanceled, Env, FileLoader, SourceDatabase, +}; +use ra_syntax::{SourceFile, TextRange, TextUnit}; + +use crate::{db::LineIndexDatabase, display::ToNav, symbol_index::FileSymbol}; + +pub use crate::{ + assists::{Assist, AssistId}, + change::{AnalysisChange, LibraryData}, + completion::{CompletionItem, CompletionItemKind, InsertTextFormat}, + diagnostics::Severity, + display::{file_structure, FunctionSignature, NavigationTarget, StructureNode}, + expand_macro::ExpandedMacro, + feature_flags::FeatureFlags, + folding_ranges::{Fold, FoldKind}, + hover::HoverResult, + inlay_hints::{InlayHint, InlayKind}, + line_index::{LineCol, LineIndex}, + line_index_utils::translate_offset_with_edit, + references::{ReferenceSearchResult, SearchScope}, + runnables::{Runnable, RunnableKind}, + source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, + syntax_highlighting::HighlightedRange, +}; + +pub use hir::Documentation; +pub use ra_db::{ + Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, +}; + +pub type Cancelable = Result; + +#[derive(Debug)] +pub struct Diagnostic { + pub message: String, + pub range: TextRange, + pub fix: Option, + pub severity: Severity, +} + +#[derive(Debug)] +pub struct Query { + query: String, + lowercased: String, + only_types: bool, + libs: bool, + exact: bool, + limit: usize, +} + +impl Query { + pub fn new(query: String) -> Query { + let lowercased = query.to_lowercase(); + Query { + query, + lowercased, + only_types: false, + libs: false, + exact: false, + limit: usize::max_value(), + } + } + + pub fn only_types(&mut self) { + self.only_types = true; + } + + pub fn libs(&mut self) { + self.libs = true; + } + + pub fn exact(&mut self) { + self.exact = true; + } + + pub fn limit(&mut self, limit: usize) { + self.limit = limit + } +} + +/// Info associated with a text range. +#[derive(Debug)] +pub struct RangeInfo { + pub range: TextRange, + pub info: T, +} + +impl RangeInfo { + pub fn new(range: TextRange, info: T) -> RangeInfo { + RangeInfo { range, info } + } +} + +/// Contains information about a call site. Specifically the +/// `FunctionSignature`and current parameter. +#[derive(Debug)] +pub struct CallInfo { + pub signature: FunctionSignature, + pub active_parameter: Option, +} + +/// `AnalysisHost` stores the current state of the world. 
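+///
+/// A condensed sketch of the intended flow, mirroring `Analysis::from_single_file`
+/// below (the path, file contents and crate setup are placeholders, not a compiled
+/// doc-test):
+///
+/// ```ignore
+/// let mut host = AnalysisHost::default();
+/// let source_root = SourceRootId(0);
+/// let file_id = FileId(0);
+///
+/// let mut change = AnalysisChange::new();
+/// change.add_root(source_root, true);
+/// change.add_file(source_root, file_id, "main.rs".into(), Arc::new("fn main() {}".to_string()));
+///
+/// let mut crate_graph = CrateGraph::default();
+/// crate_graph.add_crate_root(file_id, Edition::Edition2018, CfgOptions::default(), Env::default());
+/// change.set_crate_graph(crate_graph);
+///
+/// host.apply_change(change);
+///
+/// // Snapshots are cheap; outstanding ones are canceled by the next `apply_change`.
+/// let analysis = host.analysis();
+/// ```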
+#[derive(Debug)] +pub struct AnalysisHost { + db: db::RootDatabase, +} + +impl Default for AnalysisHost { + fn default() -> AnalysisHost { + AnalysisHost::new(None, FeatureFlags::default()) + } +} + +impl AnalysisHost { + pub fn new(lru_capcity: Option, feature_flags: FeatureFlags) -> AnalysisHost { + AnalysisHost { db: db::RootDatabase::new(lru_capcity, feature_flags) } + } + /// Returns a snapshot of the current state, which you can query for + /// semantic information. + pub fn analysis(&self) -> Analysis { + Analysis { db: self.db.snapshot() } + } + + pub fn feature_flags(&self) -> &FeatureFlags { + &self.db.feature_flags + } + + /// Applies changes to the current state of the world. If there are + /// outstanding snapshots, they will be canceled. + pub fn apply_change(&mut self, change: AnalysisChange) { + self.db.apply_change(change) + } + + pub fn maybe_collect_garbage(&mut self) { + self.db.maybe_collect_garbage(); + } + + pub fn collect_garbage(&mut self) { + self.db.collect_garbage(); + } + /// NB: this clears the database + pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { + self.db.per_query_memory_usage() + } + pub fn raw_database( + &self, + ) -> &(impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { + &self.db + } + pub fn raw_database_mut( + &mut self, + ) -> &mut (impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { + &mut self.db + } +} + +/// Analysis is a snapshot of a world state at a moment in time. It is the main +/// entry point for asking semantic information about the world. When the world +/// state is advanced using `AnalysisHost::apply_change` method, all existing +/// `Analysis` are canceled (most method return `Err(Canceled)`). +#[derive(Debug)] +pub struct Analysis { + db: salsa::Snapshot, +} + +// As a general design guideline, `Analysis` API are intended to be independent +// from the language server protocol. That is, when exposing some functionality +// we should think in terms of "what API makes most sense" and not in terms of +// "what types LSP uses". Although currently LSP is the only consumer of the +// API, the API should in theory be usable as a library, or via a different +// protocol. +impl Analysis { + // Creates an analysis instance for a single file, without any extenal + // dependencies, stdlib support or ability to apply changes. See + // `AnalysisHost` for creating a fully-featured analysis. + pub fn from_single_file(text: String) -> (Analysis, FileId) { + let mut host = AnalysisHost::default(); + let source_root = SourceRootId(0); + let mut change = AnalysisChange::new(); + change.add_root(source_root, true); + let mut crate_graph = CrateGraph::default(); + let file_id = FileId(0); + // FIXME: cfg options + // Default to enable test for single file. + let mut cfg_options = CfgOptions::default(); + cfg_options.insert_atom("test".into()); + crate_graph.add_crate_root(file_id, Edition::Edition2018, cfg_options, Env::default()); + change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text)); + change.set_crate_graph(crate_graph); + host.apply_change(change); + (host.analysis(), file_id) + } + + /// Features for Analysis. + pub fn feature_flags(&self) -> &FeatureFlags { + &self.db.feature_flags + } + + /// Debug info about the current state of the analysis. + pub fn status(&self) -> Cancelable { + self.with_db(|db| status::status(&*db)) + } + + /// Gets the text of the source file. 
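+    ///
+    /// A small sketch using the single-file constructor above (the file contents
+    /// are a placeholder; not a compiled doc-test):
+    ///
+    /// ```ignore
+    /// let (analysis, file_id) = Analysis::from_single_file("fn main() {}".to_string());
+    /// let text = analysis.file_text(file_id)?;
+    /// assert!(text.contains("main"));
+    /// ```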
+ pub fn file_text(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| db.file_text(file_id)) + } + + /// Gets the syntax tree of the file. + pub fn parse(&self, file_id: FileId) -> Cancelable { + self.with_db(|db| db.parse(file_id).tree()) + } + + /// Gets the file's `LineIndex`: data structure to convert between absolute + /// offsets and line/column representation. + pub fn file_line_index(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| db.line_index(file_id)) + } + + /// Selects the next syntactic nodes encompassing the range. + pub fn extend_selection(&self, frange: FileRange) -> Cancelable { + self.with_db(|db| extend_selection::extend_selection(db, frange)) + } + + /// Returns position of the matching brace (all types of braces are + /// supported). + pub fn matching_brace(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| { + let parse = db.parse(position.file_id); + let file = parse.tree(); + matching_brace::matching_brace(&file, position.offset) + }) + } + + /// Returns a syntax tree represented as `String`, for debug purposes. + // FIXME: use a better name here. + pub fn syntax_tree( + &self, + file_id: FileId, + text_range: Option, + ) -> Cancelable { + self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range)) + } + + pub fn expand_macro(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| expand_macro::expand_macro(db, position)) + } + + /// Returns an edit to remove all newlines in the range, cleaning up minor + /// stuff like trailing commas. + pub fn join_lines(&self, frange: FileRange) -> Cancelable { + self.with_db(|db| { + let parse = db.parse(frange.file_id); + let file_edit = SourceFileEdit { + file_id: frange.file_id, + edit: join_lines::join_lines(&parse.tree(), frange.range), + }; + SourceChange::source_file_edit("join lines", file_edit) + }) + } + + /// Returns an edit which should be applied when opening a new line, fixing + /// up minor stuff like continuing the comment. + pub fn on_enter(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| typing::on_enter(&db, position)) + } + + /// Returns an edit which should be applied after a character was typed. + /// + /// This is useful for some on-the-fly fixups, like adding `;` to `let =` + /// automatically. + pub fn on_char_typed( + &self, + position: FilePosition, + char_typed: char, + ) -> Cancelable> { + // Fast path to not even parse the file. + if !typing::TRIGGER_CHARS.contains(char_typed) { + return Ok(None); + } + self.with_db(|db| typing::on_char_typed(&db, position, char_typed)) + } + + /// Returns a tree representation of symbols in the file. Useful to draw a + /// file outline. + pub fn file_structure(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| file_structure(&db.parse(file_id).tree())) + } + + /// Returns a list of the places in the file where type hints can be displayed. + pub fn inlay_hints( + &self, + file_id: FileId, + max_inlay_hint_length: Option, + ) -> Cancelable> { + self.with_db(|db| { + inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length) + }) + } + + /// Returns the set of folding ranges. + pub fn folding_ranges(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree())) + } + + /// Fuzzy searches for a symbol. 
+ pub fn symbol_search(&self, query: Query) -> Cancelable> { + self.with_db(|db| { + symbol_index::world_symbols(db, query) + .into_iter() + .map(|s| s.to_nav(db)) + .collect::>() + }) + } + + /// Returns the definitions from the symbol at `position`. + pub fn goto_definition( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_definition::goto_definition(db, position)) + } + + /// Returns the impls from the symbol at `position`. + pub fn goto_implementation( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| impls::goto_implementation(db, position)) + } + + /// Returns the type definitions for the symbol at `position`. + pub fn goto_type_definition( + &self, + position: FilePosition, + ) -> Cancelable>>> { + self.with_db(|db| goto_type_definition::goto_type_definition(db, position)) + } + + /// Finds all usages of the reference at point. + pub fn find_all_refs( + &self, + position: FilePosition, + search_scope: Option, + ) -> Cancelable> { + self.with_db(|db| references::find_all_refs(db, position, search_scope).map(|it| it.info)) + } + + /// Returns a short text describing element at position. + pub fn hover(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| hover::hover(db, position)) + } + + /// Computes parameter information for the given call expression. + pub fn call_info(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| call_info::call_info(db, position)) + } + + /// Returns a `mod name;` declaration which created the current module. + pub fn parent_module(&self, position: FilePosition) -> Cancelable> { + self.with_db(|db| parent_module::parent_module(db, position)) + } + + /// Returns crates this file belongs too. + pub fn crate_for(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| parent_module::crate_for(db, file_id)) + } + + /// Returns the root file of the given crate. + pub fn crate_root(&self, crate_id: CrateId) -> Cancelable { + self.with_db(|db| db.crate_graph().crate_root(crate_id)) + } + + /// Returns the set of possible targets to run for the current file. + pub fn runnables(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| runnables::runnables(db, file_id)) + } + + /// Computes syntax highlighting for the given file. + pub fn highlight(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| syntax_highlighting::highlight(db, file_id)) + } + + /// Computes syntax highlighting for the given file. + pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable { + self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) + } + + /// Computes completions at the given position. + pub fn completions(&self, position: FilePosition) -> Cancelable>> { + self.with_db(|db| completion::completions(db, position).map(Into::into)) + } + + /// Computes assists (aka code actions aka intentions) for the given + /// position. + pub fn assists(&self, frange: FileRange) -> Cancelable> { + self.with_db(|db| assists::assists(db, frange)) + } + + /// Computes the set of diagnostics for the given file. + pub fn diagnostics(&self, file_id: FileId) -> Cancelable> { + self.with_db(|db| diagnostics::diagnostics(db, file_id)) + } + + /// Computes the type of the expression at the given position. + pub fn type_of(&self, frange: FileRange) -> Cancelable> { + self.with_db(|db| hover::type_of(db, frange)) + } + + /// Returns the edit required to rename reference at the position to the new + /// name. 
+    pub fn rename(
+        &self,
+        position: FilePosition,
+        new_name: &str,
+    ) -> Cancelable<Option<RangeInfo<SourceChange>>> {
+        self.with_db(|db| references::rename(db, position, new_name))
+    }
+
+    /// Performs an operation on the database that may be `Canceled`.
+    fn with_db<F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, T>(
+        &self,
+        f: F,
+    ) -> Cancelable<T> {
+        self.db.catch_canceled(f)
+    }
+}
+
+#[test]
+fn analysis_is_send() {
+    fn is_send<T: Send>() {}
+    is_send::<Analysis>();
+}
diff --git a/crates/ra_ide/src/line_index.rs b/crates/ra_ide/src/line_index.rs
new file mode 100644
index 000000000..710890d27
--- /dev/null
+++ b/crates/ra_ide/src/line_index.rs
@@ -0,0 +1,283 @@
+//! FIXME: write short doc here
+
+use crate::TextUnit;
+use rustc_hash::FxHashMap;
+use superslice::Ext;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct LineIndex {
+    pub(crate) newlines: Vec<TextUnit>,
+    pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineCol {
+    /// Zero-based
+    pub line: u32,
+    /// Zero-based
+    pub col_utf16: u32,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub(crate) struct Utf16Char {
+    pub(crate) start: TextUnit,
+    pub(crate) end: TextUnit,
+}
+
+impl Utf16Char {
+    fn len(&self) -> TextUnit {
+        self.end - self.start
+    }
+}
+
+impl LineIndex {
+    pub fn new(text: &str) -> LineIndex {
+        let mut utf16_lines = FxHashMap::default();
+        let mut utf16_chars = Vec::new();
+
+        let mut newlines = vec![0.into()];
+        let mut curr_row = 0.into();
+        let mut curr_col = 0.into();
+        let mut line = 0;
+        for c in text.chars() {
+            curr_row += TextUnit::of_char(c);
+            if c == '\n' {
+                newlines.push(curr_row);
+
+                // Save any utf-16 characters seen in the previous line
+                if !utf16_chars.is_empty() {
+                    utf16_lines.insert(line, utf16_chars);
+                    utf16_chars = Vec::new();
+                }
+
+                // Prepare for processing the next line
+                curr_col = 0.into();
+                line += 1;
+                continue;
+            }
+
+            let char_len = TextUnit::of_char(c);
+            if char_len.to_usize() > 1 {
+                utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + char_len });
+            }
+
+            curr_col += char_len;
+        }
+
+        // Save any utf-16 characters seen in the last line
+        if !utf16_chars.is_empty() {
+            utf16_lines.insert(line, utf16_chars);
+        }
+
+        LineIndex { newlines, utf16_lines }
+    }
+
+    pub fn line_col(&self, offset: TextUnit) -> LineCol {
+        let line = self.newlines.upper_bound(&offset) - 1;
+        let line_start_offset = self.newlines[line];
+        let col = offset - line_start_offset;
+
+        LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 }
+    }
+
+    pub fn offset(&self, line_col: LineCol) -> TextUnit {
+        //FIXME: return Result
+        let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16);
+        self.newlines[line_col.line as usize] + col
+    }
+
+    fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
+        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+            let mut correction = TextUnit::from_usize(0);
+            for c in utf16_chars {
+                if col >= c.end {
+                    correction += c.len() - TextUnit::from_usize(1);
+                } else {
+                    // From here on, all utf16 characters come *after* the character we are mapping,
+                    // so we don't need to take them into account
+                    break;
+                }
+            }
+
+            col -= correction;
+        }
+
+        col.to_usize()
+    }
+
+    fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit {
+        let mut col: TextUnit = col.into();
+        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+            for c in utf16_chars {
+                if col >= c.start {
+                    col += c.len() - TextUnit::from_usize(1);
+                } else {
+                    // From here on, all utf16 characters come *after* the character we are mapping,
+                    // so we don't
need to take them into account + break; + } + } + } + + col + } +} + +#[cfg(test)] +/// Simple reference implementation to use in proptests +pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol { + let mut res = LineCol { line: 0, col_utf16: 0 }; + for (i, c) in text.char_indices() { + if i + c.len_utf8() > offset.to_usize() { + // if it's an invalid offset, inside a multibyte char + // return as if it was at the start of the char + break; + } + if c == '\n' { + res.line += 1; + res.col_utf16 = 0; + } else { + res.col_utf16 += 1; + } + } + res +} + +#[cfg(test)] +mod test_line_index { + use super::*; + use proptest::{prelude::*, proptest}; + use ra_text_edit::test_utils::{arb_offset, arb_text}; + + #[test] + fn test_line_index() { + let text = "hello\nworld"; + let index = LineIndex::new(text); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 }); + assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 }); + assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 }); + assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 }); + + let text = "\nhello\nworld"; + let index = LineIndex::new(text); + assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 }); + assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 }); + assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 }); + assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 }); + assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 }); + } + + fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> { + arb_text().prop_flat_map(|text| (arb_offset(&text), Just(text))).boxed() + } + + fn to_line_col(text: &str, offset: TextUnit) -> LineCol { + let mut res = LineCol { line: 0, col_utf16: 0 }; + for (i, c) in text.char_indices() { + if i + c.len_utf8() > offset.to_usize() { + // if it's an invalid offset, inside a multibyte char + // return as if it was at the start of the char + break; + } + if c == '\n' { + res.line += 1; + res.col_utf16 = 0; + } else { + res.col_utf16 += 1; + } + } + res + } + + proptest! 
{ + #[test] + fn test_line_index_proptest((offset, text) in arb_text_with_offset()) { + let expected = to_line_col(&text, offset); + let line_index = LineIndex::new(&text); + let actual = line_index.line_col(offset); + + assert_eq!(actual, expected); + } + } +} + +#[cfg(test)] +mod test_utf8_utf16_conv { + use super::*; + + #[test] + fn test_char_len() { + assert_eq!('メ'.len_utf8(), 3); + assert_eq!('メ'.len_utf16(), 1); + } + + #[test] + fn test_empty_index() { + let col_index = LineIndex::new( + " +const C: char = 'x'; +", + ); + assert_eq!(col_index.utf16_lines.len(), 0); + } + + #[test] + fn test_single_char() { + let col_index = LineIndex::new( + " +const C: char = 'メ'; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 1); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + + // UTF-8 to UTF-16, no changes + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); + + // UTF-16 to UTF-8, no changes + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15)); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21)); + } + + #[test] + fn test_string() { + let col_index = LineIndex::new( + " +const C: char = \"メ メ\"; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 2); + assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() }); + assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() }); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); + assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); + + assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15)); + + assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20)); + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23)); + + assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15)); + } +} diff --git a/crates/ra_ide/src/line_index_utils.rs b/crates/ra_ide/src/line_index_utils.rs new file mode 100644 index 000000000..bd1e08feb --- /dev/null +++ b/crates/ra_ide/src/line_index_utils.rs @@ -0,0 +1,331 @@ +//! 
FIXME: write short doc here + +use crate::{line_index::Utf16Char, LineCol, LineIndex}; +use ra_syntax::{TextRange, TextUnit}; +use ra_text_edit::{AtomTextEdit, TextEdit}; + +#[derive(Debug, Clone)] +enum Step { + Newline(TextUnit), + Utf16Char(TextRange), +} + +#[derive(Debug)] +struct LineIndexStepIter<'a> { + line_index: &'a LineIndex, + next_newline_idx: usize, + utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, +} + +impl<'a> LineIndexStepIter<'a> { + fn from(line_index: &LineIndex) -> LineIndexStepIter { + let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None }; + // skip first newline since it's not real + x.next(); + x + } +} + +impl<'a> Iterator for LineIndexStepIter<'a> { + type Item = Step; + fn next(&mut self) -> Option { + self.utf16_chars + .as_mut() + .and_then(|(newline, x)| { + let x = x.next()?; + Some(Step::Utf16Char(TextRange::from_to(*newline + x.start, *newline + x.end))) + }) + .or_else(|| { + let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?; + self.utf16_chars = self + .line_index + .utf16_lines + .get(&(self.next_newline_idx as u32)) + .map(|x| (next_newline, x.iter())); + self.next_newline_idx += 1; + Some(Step::Newline(next_newline)) + }) + } +} + +#[derive(Debug)] +struct OffsetStepIter<'a> { + text: &'a str, + offset: TextUnit, +} + +impl<'a> Iterator for OffsetStepIter<'a> { + type Item = Step; + fn next(&mut self) -> Option { + let (next, next_offset) = self + .text + .char_indices() + .filter_map(|(i, c)| { + if c == '\n' { + let next_offset = self.offset + TextUnit::from_usize(i + 1); + let next = Step::Newline(next_offset); + Some((next, next_offset)) + } else { + let char_len = TextUnit::of_char(c); + if char_len.to_usize() > 1 { + let start = self.offset + TextUnit::from_usize(i); + let end = start + char_len; + let next = Step::Utf16Char(TextRange::from_to(start, end)); + let next_offset = end; + Some((next, next_offset)) + } else { + None + } + } + }) + .next()?; + let next_idx = (next_offset - self.offset).to_usize(); + self.text = &self.text[next_idx..]; + self.offset = next_offset; + Some(next) + } +} + +#[derive(Debug)] +enum NextSteps<'a> { + Use, + ReplaceMany(OffsetStepIter<'a>), + AddMany(OffsetStepIter<'a>), +} + +#[derive(Debug)] +struct TranslatedEdit<'a> { + delete: TextRange, + insert: &'a str, + diff: i64, +} + +struct Edits<'a> { + edits: &'a [AtomTextEdit], + current: Option>, + acc_diff: i64, +} + +impl<'a> Edits<'a> { + fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> { + let mut x = Edits { edits: text_edit.as_atoms(), current: None, acc_diff: 0 }; + x.advance_edit(); + x + } + fn advance_edit(&mut self) { + self.acc_diff += self.current.as_ref().map_or(0, |x| x.diff); + match self.edits.split_first() { + Some((next, rest)) => { + let delete = self.translate_range(next.delete); + let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64; + self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff }); + self.edits = rest; + } + None => { + self.current = None; + } + } + } + + fn next_inserted_steps(&mut self) -> Option> { + let cur = self.current.as_ref()?; + let res = Some(OffsetStepIter { offset: cur.delete.start(), text: &cur.insert }); + self.advance_edit(); + res + } + + fn next_steps(&mut self, step: &Step) -> NextSteps { + let step_pos = match *step { + Step::Newline(n) => n, + Step::Utf16Char(r) => r.end(), + }; + match &mut self.current { + Some(edit) => { + if step_pos <= edit.delete.start() { + NextSteps::Use + } else if 
step_pos <= edit.delete.end() { + let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert }; + // empty slice to avoid returning steps again + edit.insert = &edit.insert[edit.insert.len()..]; + NextSteps::ReplaceMany(iter) + } else { + let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert }; + // empty slice to avoid returning steps again + edit.insert = &edit.insert[edit.insert.len()..]; + self.advance_edit(); + NextSteps::AddMany(iter) + } + } + None => NextSteps::Use, + } + } + + fn translate_range(&self, range: TextRange) -> TextRange { + if self.acc_diff == 0 { + range + } else { + let start = self.translate(range.start()); + let end = self.translate(range.end()); + TextRange::from_to(start, end) + } + } + + fn translate(&self, x: TextUnit) -> TextUnit { + if self.acc_diff == 0 { + x + } else { + TextUnit::from((x.to_usize() as i64 + self.acc_diff) as u32) + } + } + + fn translate_step(&self, x: &Step) -> Step { + if self.acc_diff == 0 { + x.clone() + } else { + match *x { + Step::Newline(n) => Step::Newline(self.translate(n)), + Step::Utf16Char(r) => Step::Utf16Char(self.translate_range(r)), + } + } + } +} + +#[derive(Debug)] +struct RunningLineCol { + line: u32, + last_newline: TextUnit, + col_adjust: TextUnit, +} + +impl RunningLineCol { + fn new() -> RunningLineCol { + RunningLineCol { line: 0, last_newline: TextUnit::from(0), col_adjust: TextUnit::from(0) } + } + + fn to_line_col(&self, offset: TextUnit) -> LineCol { + LineCol { + line: self.line, + col_utf16: ((offset - self.last_newline) - self.col_adjust).into(), + } + } + + fn add_line(&mut self, newline: TextUnit) { + self.line += 1; + self.last_newline = newline; + self.col_adjust = TextUnit::from(0); + } + + fn adjust_col(&mut self, range: TextRange) { + self.col_adjust += range.len() - TextUnit::from(1); + } +} + +pub fn translate_offset_with_edit( + line_index: &LineIndex, + offset: TextUnit, + text_edit: &TextEdit, +) -> LineCol { + let mut state = Edits::from_text_edit(&text_edit); + + let mut res = RunningLineCol::new(); + + macro_rules! 
test_step { + ($x:ident) => { + match &$x { + Step::Newline(n) => { + if offset < *n { + return res.to_line_col(offset); + } else { + res.add_line(*n); + } + } + Step::Utf16Char(x) => { + if offset < x.end() { + // if the offset is inside a multibyte char it's invalid + // clamp it to the start of the char + let clamp = offset.min(x.start()); + return res.to_line_col(clamp); + } else { + res.adjust_col(*x); + } + } + } + }; + } + + for orig_step in LineIndexStepIter::from(line_index) { + loop { + let translated_step = state.translate_step(&orig_step); + match state.next_steps(&translated_step) { + NextSteps::Use => { + test_step!(translated_step); + break; + } + NextSteps::ReplaceMany(ns) => { + for n in ns { + test_step!(n); + } + break; + } + NextSteps::AddMany(ns) => { + for n in ns { + test_step!(n); + } + } + } + } + } + + loop { + match state.next_inserted_steps() { + None => break, + Some(ns) => { + for n in ns { + test_step!(n); + } + } + } + } + + res.to_line_col(offset) +} + +#[cfg(test)] +mod test { + use super::*; + use crate::line_index; + use proptest::{prelude::*, proptest}; + use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit}; + use ra_text_edit::TextEdit; + + #[derive(Debug)] + struct ArbTextWithEditAndOffset { + text: String, + edit: TextEdit, + edited_text: String, + offset: TextUnit, + } + + fn arb_text_with_edit_and_offset() -> BoxedStrategy { + arb_text_with_edit() + .prop_flat_map(|x| { + let edited_text = x.edit.apply(&x.text); + let arb_offset = arb_offset(&edited_text); + (Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| { + ArbTextWithEditAndOffset { text: x.text, edit: x.edit, edited_text, offset } + }) + }) + .boxed() + } + + proptest! { + #[test] + fn test_translate_offset_with_edit(x in arb_text_with_edit_and_offset()) { + let expected = line_index::to_line_col(&x.edited_text, x.offset); + let line_index = LineIndex::new(&x.text); + let actual = translate_offset_with_edit(&line_index, x.offset, &x.edit); + + assert_eq!(actual, expected); + } + } +} diff --git a/crates/ra_ide/src/marks.rs b/crates/ra_ide/src/marks.rs new file mode 100644 index 000000000..848ae4dc7 --- /dev/null +++ b/crates/ra_ide/src/marks.rs @@ -0,0 +1,13 @@ +//! See test_utils/src/marks.rs + +test_utils::marks!( + inserts_angle_brackets_for_generics + inserts_parens_for_function_calls + goto_definition_works_for_macros + goto_definition_works_for_methods + goto_definition_works_for_fields + goto_definition_works_for_record_fields + call_info_bad_offset + dont_complete_current_use + dont_complete_primitive_in_use +); diff --git a/crates/ra_ide/src/matching_brace.rs b/crates/ra_ide/src/matching_brace.rs new file mode 100644 index 000000000..d1204fac0 --- /dev/null +++ b/crates/ra_ide/src/matching_brace.rs @@ -0,0 +1,43 @@ +//! 
FIXME: write short doc here + +use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T}; + +pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { + const BRACES: &[SyntaxKind] = + &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]]; + let (brace_node, brace_idx) = file + .syntax() + .token_at_offset(offset) + .filter_map(|node| { + let idx = BRACES.iter().position(|&brace| brace == node.kind())?; + Some((node, idx)) + }) + .next()?; + let parent = brace_node.parent(); + let matching_kind = BRACES[brace_idx ^ 1]; + let matching_node = parent.children_with_tokens().find(|node| node.kind() == matching_kind)?; + Some(matching_node.text_range().start()) +} + +#[cfg(test)] +mod tests { + use test_utils::{add_cursor, assert_eq_text, extract_offset}; + + use super::*; + + #[test] + fn test_matching_brace() { + fn do_check(before: &str, after: &str) { + let (pos, before) = extract_offset(before); + let parse = SourceFile::parse(&before); + let new_pos = match matching_brace(&parse.tree(), pos) { + None => pos, + Some(pos) => pos, + }; + let actual = add_cursor(&before, new_pos); + assert_eq_text!(after, &actual); + } + + do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); + } +} diff --git a/crates/ra_ide/src/mock_analysis.rs b/crates/ra_ide/src/mock_analysis.rs new file mode 100644 index 000000000..bf8a54932 --- /dev/null +++ b/crates/ra_ide/src/mock_analysis.rs @@ -0,0 +1,149 @@ +//! FIXME: write short doc here + +use std::sync::Arc; + +use ra_cfg::CfgOptions; +use ra_db::{Env, RelativePathBuf}; +use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; + +use crate::{ + Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition::Edition2018, FileId, FilePosition, + FileRange, SourceRootId, +}; + +/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis +/// from a set of in-memory files. +#[derive(Debug, Default)] +pub struct MockAnalysis { + files: Vec<(String, String)>, +} + +impl MockAnalysis { + pub fn new() -> MockAnalysis { + MockAnalysis::default() + } + /// Creates `MockAnalysis` using a fixture data in the following format: + /// + /// ```not_rust + /// //- /main.rs + /// mod foo; + /// fn main() {} + /// + /// //- /foo.rs + /// struct Baz; + /// ``` + pub fn with_files(fixture: &str) -> MockAnalysis { + let mut res = MockAnalysis::new(); + for entry in parse_fixture(fixture) { + res.add_file(&entry.meta, &entry.text); + } + res + } + + /// Same as `with_files`, but requires that a single file contains a `<|>` marker, + /// whose position is also returned. 
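+    ///
+    /// A sketch of the expected shape (fixture syntax as in `with_files`; the marker is
+    /// stripped from the stored text):
+    ///
+    /// ```ignore
+    /// let (mock, position) = MockAnalysis::with_files_and_position(
+    ///     "
+    ///     //- /main.rs
+    ///     fn main() { f<|>oo(); }
+    ///     //- /foo.rs
+    ///     pub fn foo() {}
+    ///     ",
+    /// );
+    /// assert_eq!(position.file_id, mock.id_of("/main.rs"));
+    /// ```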
+ pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { + let mut position = None; + let mut res = MockAnalysis::new(); + for entry in parse_fixture(fixture) { + if entry.text.contains(CURSOR_MARKER) { + assert!(position.is_none(), "only one marker (<|>) per fixture is allowed"); + position = Some(res.add_file_with_position(&entry.meta, &entry.text)); + } else { + res.add_file(&entry.meta, &entry.text); + } + } + let position = position.expect("expected a marker (<|>)"); + (res, position) + } + + pub fn add_file(&mut self, path: &str, text: &str) -> FileId { + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push((path.to_string(), text.to_string())); + file_id + } + pub fn add_file_with_position(&mut self, path: &str, text: &str) -> FilePosition { + let (offset, text) = extract_offset(text); + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push((path.to_string(), text)); + FilePosition { file_id, offset } + } + pub fn add_file_with_range(&mut self, path: &str, text: &str) -> FileRange { + let (range, text) = extract_range(text); + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push((path.to_string(), text)); + FileRange { file_id, range } + } + pub fn id_of(&self, path: &str) -> FileId { + let (idx, _) = self + .files + .iter() + .enumerate() + .find(|(_, (p, _text))| path == p) + .expect("no file in this mock"); + FileId(idx as u32 + 1) + } + pub fn analysis_host(self) -> AnalysisHost { + let mut host = AnalysisHost::default(); + let source_root = SourceRootId(0); + let mut change = AnalysisChange::new(); + change.add_root(source_root, true); + let mut crate_graph = CrateGraph::default(); + let mut root_crate = None; + for (i, (path, contents)) in self.files.into_iter().enumerate() { + assert!(path.starts_with('/')); + let path = RelativePathBuf::from_path(&path[1..]).unwrap(); + let file_id = FileId(i as u32 + 1); + let cfg_options = CfgOptions::default(); + if path == "/lib.rs" || path == "/main.rs" { + root_crate = Some(crate_graph.add_crate_root( + file_id, + Edition2018, + cfg_options, + Env::default(), + )); + } else if path.ends_with("/lib.rs") { + let other_crate = + crate_graph.add_crate_root(file_id, Edition2018, cfg_options, Env::default()); + let crate_name = path.parent().unwrap().file_name().unwrap(); + if let Some(root_crate) = root_crate { + crate_graph.add_dep(root_crate, crate_name.into(), other_crate).unwrap(); + } + } + change.add_file(source_root, file_id, path, Arc::new(contents)); + } + change.set_crate_graph(crate_graph); + host.apply_change(change); + host + } + pub fn analysis(self) -> Analysis { + self.analysis_host().analysis() + } +} + +/// Creates analysis from a multi-file fixture, returns positions marked with <|>. +pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) { + let (mock, position) = MockAnalysis::with_files_and_position(fixture); + (mock.analysis(), position) +} + +/// Creates analysis for a single file. +pub fn single_file(code: &str) -> (Analysis, FileId) { + let mut mock = MockAnalysis::new(); + let file_id = mock.add_file("/main.rs", code); + (mock.analysis(), file_id) +} + +/// Creates analysis for a single file, returns position marked with <|>. +pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) { + let mut mock = MockAnalysis::new(); + let pos = mock.add_file_with_position("/main.rs", code); + (mock.analysis(), pos) +} + +/// Creates analysis for a single file, returns range marked with a pair of <|>. 
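+///
+/// A small sketch (marker handling as implemented by `test_utils::extract_range`):
+///
+/// ```ignore
+/// let (analysis, frange) = single_file_with_range("fn foo() { <|>1 + 1<|>; }");
+/// // `frange` covers exactly the text between the two markers, i.e. `1 + 1`.
+/// assert_eq!(frange.range.len(), TextUnit::from_usize("1 + 1".len()));
+/// ```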
+pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) { + let mut mock = MockAnalysis::new(); + let pos = mock.add_file_with_range("/main.rs", code); + (mock.analysis(), pos) +} diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs new file mode 100644 index 000000000..6027e7d54 --- /dev/null +++ b/crates/ra_ide/src/parent_module.rs @@ -0,0 +1,104 @@ +//! FIXME: write short doc here + +use ra_db::{CrateId, FileId, FilePosition}; + +use crate::{db::RootDatabase, NavigationTarget}; + +/// This returns `Vec` because a module may be included from several places. We +/// don't handle this case yet though, so the Vec has length at most one. +pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec { + let src = hir::ModuleSource::from_position(db, position); + let module = match hir::Module::from_definition( + db, + hir::Source { file_id: position.file_id.into(), value: src }, + ) { + None => return Vec::new(), + Some(it) => it, + }; + let nav = NavigationTarget::from_module_to_decl(db, module); + vec![nav] +} + +/// Returns `Vec` for the same reason as `parent_module` +pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec { + let src = hir::ModuleSource::from_file_id(db, file_id); + let module = + match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), value: src }) + { + Some(it) => it, + None => return Vec::new(), + }; + let krate = module.krate(); + vec![krate.crate_id()] +} + +#[cfg(test)] +mod tests { + use ra_cfg::CfgOptions; + use ra_db::Env; + + use crate::{ + mock_analysis::{analysis_and_position, MockAnalysis}, + AnalysisChange, CrateGraph, + Edition::Edition2018, + }; + + #[test] + fn test_resolve_parent_module() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo; + //- /foo.rs + <|>// empty + ", + ); + let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("foo MODULE FileId(1) [0; 8)"); + } + + #[test] + fn test_resolve_parent_module_for_inline() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo { + mod bar { + mod baz { <|> } + } + } + ", + ); + let nav = analysis.parent_module(pos).unwrap().pop().unwrap(); + nav.assert_match("baz MODULE FileId(1) [32; 44)"); + } + + #[test] + fn test_resolve_crate_root() { + let mock = MockAnalysis::with_files( + " + //- /bar.rs + mod foo; + //- /foo.rs + // empty <|> + ", + ); + let root_file = mock.id_of("/bar.rs"); + let mod_file = mock.id_of("/foo.rs"); + let mut host = mock.analysis_host(); + assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); + + let mut crate_graph = CrateGraph::default(); + let crate_id = crate_graph.add_crate_root( + root_file, + Edition2018, + CfgOptions::default(), + Env::default(), + ); + let mut change = AnalysisChange::new(); + change.set_crate_graph(crate_graph); + host.apply_change(change); + + assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); + } +} diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs new file mode 100644 index 000000000..21a1ea69e --- /dev/null +++ b/crates/ra_ide/src/references.rs @@ -0,0 +1,389 @@ +//! This module implements a reference search. +//! First, the element at the cursor position must be either an `ast::Name` +//! or `ast::NameRef`. If it's a `ast::NameRef`, at the classification step we +//! try to resolve the direct tree parent of this element, otherwise we +//! already have a definition and just need to get its HIR together with +//! 
some information that is needed for further steps of searching.
+//! After that, we collect files that might contain references and look
+//! for text occurrences of the identifier. If there's an `ast::NameRef`
+//! at the index that the match starts at and its tree parent is
+//! resolved to the search element definition, we get a reference.
+
+mod classify;
+mod name_definition;
+mod rename;
+mod search_scope;
+
+use hir::Source;
+use once_cell::unsync::Lazy;
+use ra_db::{SourceDatabase, SourceDatabaseExt};
+use ra_prof::profile;
+use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SourceFile, SyntaxNode, TextUnit};
+
+use crate::{
+    db::RootDatabase, display::ToNav, FilePosition, FileRange, NavigationTarget, RangeInfo,
+};
+
+pub(crate) use self::{
+    classify::{classify_name, classify_name_ref},
+    name_definition::{NameDefinition, NameKind},
+    rename::rename,
+};
+
+pub use self::search_scope::SearchScope;
+
+#[derive(Debug, Clone)]
+pub struct ReferenceSearchResult {
+    declaration: NavigationTarget,
+    references: Vec<FileRange>,
+}
+
+impl ReferenceSearchResult {
+    pub fn declaration(&self) -> &NavigationTarget {
+        &self.declaration
+    }
+
+    pub fn references(&self) -> &[FileRange] {
+        &self.references
+    }
+
+    /// Total number of references.
+    /// At least 1, since all valid references should
+    /// have a declaration.
+    pub fn len(&self) -> usize {
+        self.references.len() + 1
+    }
+}
+
+// allow turning ReferenceSearchResult into an iterator
+// over FileRanges
+impl IntoIterator for ReferenceSearchResult {
+    type Item = FileRange;
+    type IntoIter = std::vec::IntoIter<FileRange>;
+
+    fn into_iter(mut self) -> Self::IntoIter {
+        let mut v = Vec::with_capacity(self.len());
+        v.push(FileRange { file_id: self.declaration.file_id(), range: self.declaration.range() });
+        v.append(&mut self.references);
+        v.into_iter()
+    }
+}
+
+pub(crate) fn find_all_refs(
+    db: &RootDatabase,
+    position: FilePosition,
+    search_scope: Option<SearchScope>,
+) -> Option<RangeInfo<ReferenceSearchResult>> {
+    let parse = db.parse(position.file_id);
+    let syntax = parse.tree().syntax().clone();
+    let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position)?;
+
+    let declaration = match def.kind {
+        NameKind::Macro(mac) => mac.to_nav(db),
+        NameKind::Field(field) => field.to_nav(db),
+        NameKind::AssocItem(assoc) => assoc.to_nav(db),
+        NameKind::Def(def) => NavigationTarget::from_def(db, def)?,
+        NameKind::SelfType(imp) => imp.to_nav(db),
+        NameKind::Local(local) => local.to_nav(db),
+        NameKind::GenericParam(_) => return None,
+    };
+
+    let search_scope = {
+        let base = def.search_scope(db);
+        match search_scope {
+            None => base,
+            Some(scope) => base.intersection(&scope),
+        }
+    };
+
+    let references = process_definition(db, def, name, search_scope);
+
+    Some(RangeInfo::new(range, ReferenceSearchResult { declaration, references }))
+}
+
+fn find_name<'a>(
+    db: &RootDatabase,
+    syntax: &SyntaxNode,
+    position: FilePosition,
+) -> Option<RangeInfo<(String, NameDefinition)>> {
+    if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
+        let def = classify_name(db, Source::new(position.file_id.into(), &name))?;
+        let range = name.syntax().text_range();
+        return Some(RangeInfo::new(range, (name.text().to_string(), def)));
+    }
+    let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
+    let def = classify_name_ref(db, Source::new(position.file_id.into(), &name_ref))?;
+    let range = name_ref.syntax().text_range();
+    Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
+}
+
+fn process_definition(
+    db: &RootDatabase,
+    def: NameDefinition,
+    name: String,
+    scope: SearchScope,
+) -> Vec { + let _p = profile("process_definition"); + + let pat = name.as_str(); + let mut refs = vec![]; + + for (file_id, search_range) in scope { + let text = db.file_text(file_id); + let parse = Lazy::new(|| SourceFile::parse(&text)); + + for (idx, _) in text.match_indices(pat) { + let offset = TextUnit::from_usize(idx); + + if let Some(name_ref) = + find_node_at_offset::(parse.tree().syntax(), offset) + { + let range = name_ref.syntax().text_range(); + if let Some(search_range) = search_range { + if !range.is_subrange(&search_range) { + continue; + } + } + if let Some(d) = classify_name_ref(db, Source::new(file_id.into(), &name_ref)) { + if d == def { + refs.push(FileRange { file_id, range }); + } + } + } + } + } + refs +} + +#[cfg(test)] +mod tests { + use crate::{ + mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis}, + ReferenceSearchResult, SearchScope, + }; + + #[test] + fn test_find_all_refs_for_local() { + let code = r#" + fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { + i = 0; + } + + i = 5; + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 5); + } + + #[test] + fn test_find_all_refs_for_param_inside() { + let code = r#" + fn foo(i : u32) -> u32 { + i<|> + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 2); + } + + #[test] + fn test_find_all_refs_for_fn_param() { + let code = r#" + fn foo(i<|> : u32) -> u32 { + i + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 2); + } + + #[test] + fn test_find_all_refs_field_name() { + let code = r#" + //- /lib.rs + struct Foo { + pub spam<|>: u32, + } + + fn main(s: Foo) { + let f = s.spam; + } + "#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 2); + } + + #[test] + fn test_find_all_refs_impl_item_name() { + let code = r#" + //- /lib.rs + struct Foo; + impl Foo { + fn f<|>(&self) { } + } + "#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 1); + } + + #[test] + fn test_find_all_refs_enum_var_name() { + let code = r#" + //- /lib.rs + enum Foo { + A, + B<|>, + C, + } + "#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 1); + } + + #[test] + fn test_find_all_refs_two_modules() { + let code = r#" + //- /lib.rs + pub mod foo; + pub mod bar; + + fn f() { + let i = foo::Foo { n: 5 }; + } + + //- /foo.rs + use crate::bar; + + pub struct Foo { + pub n: u32, + } + + fn f() { + let i = bar::Bar { n: 5 }; + } + + //- /bar.rs + use crate::foo; + + pub struct Bar { + pub n: u32, + } + + fn f() { + let i = foo::Foo<|> { n: 5 }; + } + "#; + + let (analysis, pos) = analysis_and_position(code); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + assert_eq!(refs.len(), 3); + } + + // `mod foo;` is not in the results because `foo` is an `ast::Name`. + // So, there are two references: the first one is a definition of the `foo` module, + // which is the whole `foo.rs`, and the second one is in `use foo::Foo`. 
+ #[test] + fn test_find_all_refs_decl_module() { + let code = r#" + //- /lib.rs + mod foo<|>; + + use foo::Foo; + + fn f() { + let i = Foo { n: 5 }; + } + + //- /foo.rs + pub struct Foo { + pub n: u32, + } + "#; + + let (analysis, pos) = analysis_and_position(code); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + assert_eq!(refs.len(), 2); + } + + #[test] + fn test_find_all_refs_super_mod_vis() { + let code = r#" + //- /lib.rs + mod foo; + + //- /foo.rs + mod some; + use some::Foo; + + fn f() { + let i = Foo { n: 5 }; + } + + //- /foo/some.rs + pub(super) struct Foo<|> { + pub n: u32, + } + "#; + + let (analysis, pos) = analysis_and_position(code); + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + assert_eq!(refs.len(), 3); + } + + #[test] + fn test_find_all_refs_with_scope() { + let code = r#" + //- /lib.rs + mod foo; + mod bar; + + pub fn quux<|>() {} + + //- /foo.rs + fn f() { super::quux(); } + + //- /bar.rs + fn f() { super::quux(); } + "#; + + let (mock, pos) = MockAnalysis::with_files_and_position(code); + let bar = mock.id_of("/bar.rs"); + let analysis = mock.analysis(); + + let refs = analysis.find_all_refs(pos, None).unwrap().unwrap(); + assert_eq!(refs.len(), 3); + + let refs = + analysis.find_all_refs(pos, Some(SearchScope::single_file(bar))).unwrap().unwrap(); + assert_eq!(refs.len(), 2); + } + + #[test] + fn test_find_all_refs_macro_def() { + let code = r#" + #[macro_export] + macro_rules! m1<|> { () => (()) } + + fn foo() { + m1(); + m1(); + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 3); + } + + fn get_all_refs(text: &str) -> ReferenceSearchResult { + let (analysis, position) = single_file_with_position(text); + analysis.find_all_refs(position, None).unwrap().unwrap() + } +} diff --git a/crates/ra_ide/src/references/classify.rs b/crates/ra_ide/src/references/classify.rs new file mode 100644 index 000000000..5cea805ec --- /dev/null +++ b/crates/ra_ide/src/references/classify.rs @@ -0,0 +1,186 @@ +//! Functions that are used to classify an element from its definition or reference. + +use hir::{FromSource, Module, ModuleSource, PathResolution, Source, SourceAnalyzer}; +use ra_prof::profile; +use ra_syntax::{ast, match_ast, AstNode}; +use test_utils::tested_by; + +use super::{ + name_definition::{from_assoc_item, from_module_def, from_struct_field}, + NameDefinition, NameKind, +}; +use crate::db::RootDatabase; + +pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option { + let _p = profile("classify_name"); + let parent = name.value.syntax().parent()?; + + match_ast! 
{ + match parent { + ast::BindPat(it) => { + let src = name.with_value(it); + let local = hir::Local::from_source(db, src)?; + Some(NameDefinition { + visibility: None, + container: local.module(db), + kind: NameKind::Local(local), + }) + }, + ast::RecordFieldDef(it) => { + let ast = hir::FieldSource::Named(it); + let src = name.with_value(ast); + let field = hir::StructField::from_source(db, src)?; + Some(from_struct_field(db, field)) + }, + ast::Module(it) => { + let def = { + if !it.has_semi() { + let ast = hir::ModuleSource::Module(it); + let src = name.with_value(ast); + hir::Module::from_definition(db, src) + } else { + let src = name.with_value(it); + hir::Module::from_declaration(db, src) + } + }?; + Some(from_module_def(db, def.into(), None)) + }, + ast::StructDef(it) => { + let src = name.with_value(it); + let def = hir::Struct::from_source(db, src)?; + Some(from_module_def(db, def.into(), None)) + }, + ast::EnumDef(it) => { + let src = name.with_value(it); + let def = hir::Enum::from_source(db, src)?; + Some(from_module_def(db, def.into(), None)) + }, + ast::TraitDef(it) => { + let src = name.with_value(it); + let def = hir::Trait::from_source(db, src)?; + Some(from_module_def(db, def.into(), None)) + }, + ast::StaticDef(it) => { + let src = name.with_value(it); + let def = hir::Static::from_source(db, src)?; + Some(from_module_def(db, def.into(), None)) + }, + ast::EnumVariant(it) => { + let src = name.with_value(it); + let def = hir::EnumVariant::from_source(db, src)?; + Some(from_module_def(db, def.into(), None)) + }, + ast::FnDef(it) => { + let src = name.with_value(it); + let def = hir::Function::from_source(db, src)?; + if parent.parent().and_then(ast::ItemList::cast).is_some() { + Some(from_assoc_item(db, def.into())) + } else { + Some(from_module_def(db, def.into(), None)) + } + }, + ast::ConstDef(it) => { + let src = name.with_value(it); + let def = hir::Const::from_source(db, src)?; + if parent.parent().and_then(ast::ItemList::cast).is_some() { + Some(from_assoc_item(db, def.into())) + } else { + Some(from_module_def(db, def.into(), None)) + } + }, + ast::TypeAliasDef(it) => { + let src = name.with_value(it); + let def = hir::TypeAlias::from_source(db, src)?; + if parent.parent().and_then(ast::ItemList::cast).is_some() { + Some(from_assoc_item(db, def.into())) + } else { + Some(from_module_def(db, def.into(), None)) + } + }, + ast::MacroCall(it) => { + let src = name.with_value(it); + let def = hir::MacroDef::from_source(db, src.clone())?; + + let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax())); + let module = Module::from_definition(db, src.with_value(module_src))?; + + Some(NameDefinition { + visibility: None, + container: module, + kind: NameKind::Macro(def), + }) + }, + _ => None, + } + } +} + +pub(crate) fn classify_name_ref( + db: &RootDatabase, + name_ref: Source<&ast::NameRef>, +) -> Option { + let _p = profile("classify_name_ref"); + + let parent = name_ref.value.syntax().parent()?; + let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None); + + if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { + tested_by!(goto_definition_works_for_methods); + if let Some(func) = analyzer.resolve_method_call(&method_call) { + return Some(from_assoc_item(db, func.into())); + } + } + + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { + tested_by!(goto_definition_works_for_fields); + if let Some(field) = analyzer.resolve_field(&field_expr) { + return Some(from_struct_field(db, field)); + 
}
+    }
+
+    if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
+        tested_by!(goto_definition_works_for_record_fields);
+        if let Some(field_def) = analyzer.resolve_record_field(&record_field) {
+            return Some(from_struct_field(db, field_def));
+        }
+    }
+
+    let ast = ModuleSource::from_child_node(db, name_ref.with_value(&parent));
+    // FIXME: find correct container and visibility for each case
+    let container = Module::from_definition(db, name_ref.with_value(ast))?;
+    let visibility = None;
+
+    if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
+        tested_by!(goto_definition_works_for_macros);
+        if let Some(macro_def) = analyzer.resolve_macro_call(db, name_ref.with_value(&macro_call)) {
+            let kind = NameKind::Macro(macro_def);
+            return Some(NameDefinition { kind, container, visibility });
+        }
+    }
+
+    let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
+    let resolved = analyzer.resolve_path(db, &path)?;
+    match resolved {
+        PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
+        PathResolution::AssocItem(item) => Some(from_assoc_item(db, item)),
+        PathResolution::Local(local) => {
+            let container = local.module(db);
+            let kind = NameKind::Local(local);
+            Some(NameDefinition { kind, container, visibility: None })
+        }
+        PathResolution::GenericParam(par) => {
+            // FIXME: get generic param def
+            let kind = NameKind::GenericParam(par);
+            Some(NameDefinition { kind, container, visibility })
+        }
+        PathResolution::Macro(def) => {
+            let kind = NameKind::Macro(def);
+            Some(NameDefinition { kind, container, visibility })
+        }
+        PathResolution::SelfType(impl_block) => {
+            let kind = NameKind::SelfType(impl_block);
+            let container = impl_block.module(db);
+            Some(NameDefinition { kind, container, visibility })
+        }
+    }
+}
diff --git a/crates/ra_ide/src/references/name_definition.rs b/crates/ra_ide/src/references/name_definition.rs
new file mode 100644
index 000000000..10d3a2364
--- /dev/null
+++ b/crates/ra_ide/src/references/name_definition.rs
@@ -0,0 +1,83 @@
+//! `NameDefinition` keeps information about the element we want to search references for.
+//! The element is represented by `NameKind`. It's located inside some `container` and
+//! has a `visibility`, which defines a search scope.
+//! Note that the reference search is not possible for all of the classified items.
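+//!
+//! A rough sketch of how these pieces fit together (illustrative only; the real callers
+//! live in `classify.rs` and `search_scope.rs`):
+//!
+//! ```ignore
+//! let def: NameDefinition = classify_name_ref(db, name_ref)?;
+//! // `kind`, `container` and `visibility` then determine where to look for references:
+//! let scope = def.search_scope(db);
+//! ```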
+ +use hir::{ + Adt, AssocItem, GenericParam, HasSource, ImplBlock, Local, MacroDef, Module, ModuleDef, + StructField, VariantDef, +}; +use ra_syntax::{ast, ast::VisibilityOwner}; + +use crate::db::RootDatabase; + +#[derive(Debug, PartialEq, Eq)] +pub enum NameKind { + Macro(MacroDef), + Field(StructField), + AssocItem(AssocItem), + Def(ModuleDef), + SelfType(ImplBlock), + Local(Local), + GenericParam(GenericParam), +} + +#[derive(PartialEq, Eq)] +pub(crate) struct NameDefinition { + pub visibility: Option, + pub container: Module, + pub kind: NameKind, +} + +pub(super) fn from_assoc_item(db: &RootDatabase, item: AssocItem) -> NameDefinition { + let container = item.module(db); + let visibility = match item { + AssocItem::Function(f) => f.source(db).value.visibility(), + AssocItem::Const(c) => c.source(db).value.visibility(), + AssocItem::TypeAlias(a) => a.source(db).value.visibility(), + }; + let kind = NameKind::AssocItem(item); + NameDefinition { kind, container, visibility } +} + +pub(super) fn from_struct_field(db: &RootDatabase, field: StructField) -> NameDefinition { + let kind = NameKind::Field(field); + let parent = field.parent_def(db); + let container = parent.module(db); + let visibility = match parent { + VariantDef::Struct(s) => s.source(db).value.visibility(), + VariantDef::Union(e) => e.source(db).value.visibility(), + VariantDef::EnumVariant(e) => e.source(db).value.parent_enum().visibility(), + }; + NameDefinition { kind, container, visibility } +} + +pub(super) fn from_module_def( + db: &RootDatabase, + def: ModuleDef, + module: Option, +) -> NameDefinition { + let kind = NameKind::Def(def); + let (container, visibility) = match def { + ModuleDef::Module(it) => { + let container = it.parent(db).or_else(|| Some(it)).unwrap(); + let visibility = it.declaration_source(db).and_then(|s| s.value.visibility()); + (container, visibility) + } + ModuleDef::EnumVariant(it) => { + let container = it.module(db); + let visibility = it.source(db).value.parent_enum().visibility(); + (container, visibility) + } + ModuleDef::Function(it) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::Const(it) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::Static(it) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::Trait(it) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).value.visibility()), + ModuleDef::BuiltinType(..) => (module.unwrap(), None), + }; + NameDefinition { kind, container, visibility } +} diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs new file mode 100644 index 000000000..d58496049 --- /dev/null +++ b/crates/ra_ide/src/references/rename.rs @@ -0,0 +1,328 @@ +//! 
FIXME: write short doc here + +use hir::ModuleSource; +use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; +use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SyntaxNode}; +use ra_text_edit::TextEdit; + +use crate::{ + db::RootDatabase, FileId, FilePosition, FileSystemEdit, RangeInfo, SourceChange, + SourceFileEdit, TextRange, +}; + +use super::find_all_refs; + +pub(crate) fn rename( + db: &RootDatabase, + position: FilePosition, + new_name: &str, +) -> Option> { + let parse = db.parse(position.file_id); + if let Some((ast_name, ast_module)) = + find_name_and_module_at_offset(parse.tree().syntax(), position) + { + let range = ast_name.syntax().text_range(); + rename_mod(db, &ast_name, &ast_module, position, new_name) + .map(|info| RangeInfo::new(range, info)) + } else { + rename_reference(db, position, new_name) + } +} + +fn find_name_and_module_at_offset( + syntax: &SyntaxNode, + position: FilePosition, +) -> Option<(ast::Name, ast::Module)> { + let ast_name = find_node_at_offset::(syntax, position.offset)?; + let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?; + Some((ast_name, ast_module)) +} + +fn source_edit_from_file_id_range( + file_id: FileId, + range: TextRange, + new_name: &str, +) -> SourceFileEdit { + SourceFileEdit { file_id, edit: TextEdit::replace(range, new_name.into()) } +} + +fn rename_mod( + db: &RootDatabase, + ast_name: &ast::Name, + ast_module: &ast::Module, + position: FilePosition, + new_name: &str, +) -> Option { + let mut source_file_edits = Vec::new(); + let mut file_system_edits = Vec::new(); + let module_src = hir::Source { file_id: position.file_id.into(), value: ast_module.clone() }; + if let Some(module) = hir::Module::from_declaration(db, module_src) { + let src = module.definition_source(db); + let file_id = src.file_id.original_file(db); + match src.value { + ModuleSource::SourceFile(..) => { + let mod_path: RelativePathBuf = db.file_relative_path(file_id); + // mod is defined in path/to/dir/mod.rs + let dst_path = if mod_path.file_stem() == Some("mod") { + mod_path + .parent() + .and_then(|p| p.parent()) + .or_else(|| Some(RelativePath::new(""))) + .map(|p| p.join(new_name).join("mod.rs")) + } else { + Some(mod_path.with_file_name(new_name).with_extension("rs")) + }; + if let Some(path) = dst_path { + let move_file = FileSystemEdit::MoveFile { + src: file_id, + dst_source_root: db.file_source_root(position.file_id), + dst_path: path, + }; + file_system_edits.push(move_file); + } + } + ModuleSource::Module(..) 
=> {} + } + } + + let edit = SourceFileEdit { + file_id: position.file_id, + edit: TextEdit::replace(ast_name.syntax().text_range(), new_name.into()), + }; + source_file_edits.push(edit); + + Some(SourceChange::from_edits("rename", source_file_edits, file_system_edits)) +} + +fn rename_reference( + db: &RootDatabase, + position: FilePosition, + new_name: &str, +) -> Option> { + let RangeInfo { range, info: refs } = find_all_refs(db, position, None)?; + + let edit = refs + .into_iter() + .map(|range| source_edit_from_file_id_range(range.file_id, range.range, new_name)) + .collect::>(); + + if edit.is_empty() { + return None; + } + + Some(RangeInfo::new(range, SourceChange::source_file_edits("rename", edit))) +} + +#[cfg(test)] +mod tests { + use insta::assert_debug_snapshot; + use ra_text_edit::TextEditBuilder; + use test_utils::assert_eq_text; + + use crate::{ + mock_analysis::analysis_and_position, mock_analysis::single_file_with_position, FileId, + }; + + #[test] + fn test_rename_for_local() { + test_rename( + r#" + fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { + i = 0; + } + + i = 5; + }"#, + "k", + r#" + fn main() { + let mut k = 1; + let j = 1; + k = k + j; + + { + k = 0; + } + + k = 5; + }"#, + ); + } + + #[test] + fn test_rename_for_param_inside() { + test_rename( + r#" + fn foo(i : u32) -> u32 { + i<|> + }"#, + "j", + r#" + fn foo(j : u32) -> u32 { + j + }"#, + ); + } + + #[test] + fn test_rename_refs_for_fn_param() { + test_rename( + r#" + fn foo(i<|> : u32) -> u32 { + i + }"#, + "new_name", + r#" + fn foo(new_name : u32) -> u32 { + new_name + }"#, + ); + } + + #[test] + fn test_rename_for_mut_param() { + test_rename( + r#" + fn foo(mut i<|> : u32) -> u32 { + i + }"#, + "new_name", + r#" + fn foo(mut new_name : u32) -> u32 { + new_name + }"#, + ); + } + + #[test] + fn test_rename_mod() { + let (analysis, position) = analysis_and_position( + " + //- /lib.rs + mod bar; + + //- /bar.rs + mod foo<|>; + + //- /bar/foo.rs + // emtpy + ", + ); + let new_name = "foo2"; + let source_change = analysis.rename(position, new_name).unwrap(); + assert_debug_snapshot!(&source_change, +@r###" + Some( + RangeInfo { + range: [4; 7), + info: SourceChange { + label: "rename", + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 2, + ), + edit: TextEdit { + atoms: [ + AtomTextEdit { + delete: [4; 7), + insert: "foo2", + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 3, + ), + dst_source_root: SourceRootId( + 0, + ), + dst_path: "bar/foo2.rs", + }, + ], + cursor_position: None, + }, + }, + ) + "###); + } + + #[test] + fn test_rename_mod_in_dir() { + let (analysis, position) = analysis_and_position( + " + //- /lib.rs + mod fo<|>o; + //- /foo/mod.rs + // emtpy + ", + ); + let new_name = "foo2"; + let source_change = analysis.rename(position, new_name).unwrap(); + assert_debug_snapshot!(&source_change, + @r###" + Some( + RangeInfo { + range: [4; 7), + info: SourceChange { + label: "rename", + source_file_edits: [ + SourceFileEdit { + file_id: FileId( + 1, + ), + edit: TextEdit { + atoms: [ + AtomTextEdit { + delete: [4; 7), + insert: "foo2", + }, + ], + }, + }, + ], + file_system_edits: [ + MoveFile { + src: FileId( + 2, + ), + dst_source_root: SourceRootId( + 0, + ), + dst_path: "foo2/mod.rs", + }, + ], + cursor_position: None, + }, + }, + ) + "### + ); + } + + fn test_rename(text: &str, new_name: &str, expected: &str) { + let (analysis, position) = single_file_with_position(text); + let source_change = analysis.rename(position, new_name).unwrap(); 
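+        // Collect the atom edits of the (here, single-file) rename into one TextEdit and
+        // apply it to that file's original text, so the result can be compared as a string.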
+        let mut text_edit_builder = TextEditBuilder::default();
+        let mut file_id: Option<FileId> = None;
+        if let Some(change) = source_change {
+            for edit in change.info.source_file_edits {
+                file_id = Some(edit.file_id);
+                for atom in edit.edit.as_atoms() {
+                    text_edit_builder.replace(atom.delete, atom.insert.clone());
+                }
+            }
+        }
+        let result =
+            text_edit_builder.finish().apply(&*analysis.file_text(file_id.unwrap()).unwrap());
+        assert_eq_text!(expected, &*result);
+    }
+}
diff --git a/crates/ra_ide/src/references/search_scope.rs b/crates/ra_ide/src/references/search_scope.rs
new file mode 100644
index 000000000..f5c9589f4
--- /dev/null
+++ b/crates/ra_ide/src/references/search_scope.rs
@@ -0,0 +1,145 @@
+//! Generally, `search_scope` returns files that might contain references for the element.
+//! For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependent crates.
+//! In some cases, the location of the references is known to within a `TextRange`,
+//! e.g. for things like local variables.
+use std::mem;
+
+use hir::{DefWithBody, HasSource, ModuleSource};
+use ra_db::{FileId, SourceDatabase, SourceDatabaseExt};
+use ra_prof::profile;
+use ra_syntax::{AstNode, TextRange};
+use rustc_hash::FxHashMap;
+
+use crate::db::RootDatabase;
+
+use super::{NameDefinition, NameKind};
+
+pub struct SearchScope {
+    entries: FxHashMap<FileId, Option<TextRange>>,
+}
+
+impl SearchScope {
+    fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
+        SearchScope { entries }
+    }
+    pub fn single_file(file: FileId) -> SearchScope {
+        SearchScope::new(std::iter::once((file, None)).collect())
+    }
+    pub(crate) fn intersection(&self, other: &SearchScope) -> SearchScope {
+        let (mut small, mut large) = (&self.entries, &other.entries);
+        if small.len() > large.len() {
+            mem::swap(&mut small, &mut large)
+        }
+
+        let res = small
+            .iter()
+            .filter_map(|(file_id, r1)| {
+                let r2 = large.get(file_id)?;
+                let r = intersect_ranges(*r1, *r2)?;
+                Some((*file_id, r))
+            })
+            .collect();
+        return SearchScope::new(res);
+
+        fn intersect_ranges(
+            r1: Option<TextRange>,
+            r2: Option<TextRange>,
+        ) -> Option<Option<TextRange>> {
+            match (r1, r2) {
+                (None, r) | (r, None) => Some(r),
+                (Some(r1), Some(r2)) => {
+                    let r = r1.intersection(&r2)?;
+                    Some(Some(r))
+                }
+            }
+        }
+    }
+}
+
+impl IntoIterator for SearchScope {
+    type Item = (FileId, Option<TextRange>);
+    type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>;
+    fn into_iter(self) -> Self::IntoIter {
+        self.entries.into_iter()
+    }
+}
+
+impl NameDefinition {
+    pub(crate) fn search_scope(&self, db: &RootDatabase) -> SearchScope {
+        let _p = profile("search_scope");
+
+        let module_src = self.container.definition_source(db);
+        let file_id = module_src.file_id.original_file(db);
+
+        if let NameKind::Local(var) = self.kind {
+            let range = match var.parent(db) {
+                DefWithBody::Function(f) => f.source(db).value.syntax().text_range(),
+                DefWithBody::Const(c) => c.source(db).value.syntax().text_range(),
+                DefWithBody::Static(s) => s.source(db).value.syntax().text_range(),
+            };
+            let mut res = FxHashMap::default();
+            res.insert(file_id, Some(range));
+            return SearchScope::new(res);
+        }
+
+        let vis =
+            self.visibility.as_ref().map(|v| v.syntax().to_string()).unwrap_or("".to_string());
+
+        if vis.as_str() == "pub(super)" {
+            if let Some(parent_module) = self.container.parent(db) {
+                let mut res = FxHashMap::default();
+                let parent_src = parent_module.definition_source(db);
+                let file_id = parent_src.file_id.original_file(db);
+
+                match parent_src.value {
+                    ModuleSource::Module(m) => {
+                        let range = Some(m.syntax().text_range());
+                        res.insert(file_id, range);
+                    }
+ ModuleSource::SourceFile(_) => { + res.insert(file_id, None); + res.extend(parent_module.children(db).map(|m| { + let src = m.definition_source(db); + (src.file_id.original_file(db), None) + })); + } + } + return SearchScope::new(res); + } + } + + if vis.as_str() != "" { + let source_root_id = db.file_source_root(file_id); + let source_root = db.source_root(source_root_id); + let mut res = source_root.walk().map(|id| (id, None)).collect::>(); + + // FIXME: add "pub(in path)" + + if vis.as_str() == "pub(crate)" { + return SearchScope::new(res); + } + if vis.as_str() == "pub" { + let krate = self.container.krate(); + let crate_graph = db.crate_graph(); + for crate_id in crate_graph.iter() { + let mut crate_deps = crate_graph.dependencies(crate_id); + if crate_deps.any(|dep| dep.crate_id() == krate.crate_id()) { + let root_file = crate_graph.crate_root(crate_id); + let source_root_id = db.file_source_root(root_file); + let source_root = db.source_root(source_root_id); + res.extend(source_root.walk().map(|id| (id, None))); + } + } + return SearchScope::new(res); + } + } + + let mut res = FxHashMap::default(); + let range = match module_src.value { + ModuleSource::Module(m) => Some(m.syntax().text_range()), + ModuleSource::SourceFile(_) => None, + }; + res.insert(file_id, range); + SearchScope::new(res) + } +} diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs new file mode 100644 index 000000000..8039a5164 --- /dev/null +++ b/crates/ra_ide/src/runnables.rs @@ -0,0 +1,242 @@ +//! FIXME: write short doc here + +use hir::Source; +use itertools::Itertools; +use ra_db::SourceDatabase; +use ra_syntax::{ + ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, + match_ast, SyntaxNode, TextRange, +}; + +use crate::{db::RootDatabase, FileId}; + +#[derive(Debug)] +pub struct Runnable { + pub range: TextRange, + pub kind: RunnableKind, +} + +#[derive(Debug)] +pub enum RunnableKind { + Test { name: String }, + TestMod { path: String }, + Bench { name: String }, + Bin, +} + +pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { + let parse = db.parse(file_id); + parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() +} + +fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option { + match_ast! { + match item { + ast::FnDef(it) => { runnable_fn(it) }, + ast::Module(it) => { runnable_mod(db, file_id, it) }, + _ => { None }, + } + } +} + +fn runnable_fn(fn_def: ast::FnDef) -> Option { + let name = fn_def.name()?.text().clone(); + let kind = if name == "main" { + RunnableKind::Bin + } else if fn_def.has_atom_attr("test") { + RunnableKind::Test { name: name.to_string() } + } else if fn_def.has_atom_attr("bench") { + RunnableKind::Bench { name: name.to_string() } + } else { + return None; + }; + Some(Runnable { range: fn_def.syntax().text_range(), kind }) +} + +fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option { + let has_test_function = module + .item_list()? 
+ .items() + .filter_map(|it| match it { + ast::ModuleItem::FnDef(it) => Some(it), + _ => None, + }) + .any(|f| f.has_atom_attr("test")); + if !has_test_function { + return None; + } + let range = module.syntax().text_range(); + let src = hir::ModuleSource::from_child_node(db, Source::new(file_id.into(), &module.syntax())); + let module = hir::Module::from_definition(db, Source::new(file_id.into(), src))?; + + let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); + Some(Runnable { range, kind: RunnableKind::TestMod { path } }) +} + +#[cfg(test)] +mod tests { + use insta::assert_debug_snapshot; + + use crate::mock_analysis::analysis_and_position; + + #[test] + fn test_runnables() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + fn main() {} + + #[test] + fn test_foo() {} + + #[test] + #[ignore] + fn test_foo() {} + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_debug_snapshot!(&runnables, + @r###" + [ + Runnable { + range: [1; 21), + kind: Bin, + }, + Runnable { + range: [22; 46), + kind: Test { + name: "test_foo", + }, + }, + Runnable { + range: [47; 81), + kind: Test { + name: "test_foo", + }, + }, + ] + "### + ); + } + + #[test] + fn test_runnables_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod test_mod { + #[test] + fn test_foo1() {} + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_debug_snapshot!(&runnables, + @r###" + [ + Runnable { + range: [1; 59), + kind: TestMod { + path: "test_mod", + }, + }, + Runnable { + range: [28; 57), + kind: Test { + name: "test_foo1", + }, + }, + ] + "### + ); + } + + #[test] + fn test_runnables_one_depth_layer_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod foo { + mod test_mod { + #[test] + fn test_foo1() {} + } + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_debug_snapshot!(&runnables, + @r###" + [ + Runnable { + range: [23; 85), + kind: TestMod { + path: "foo::test_mod", + }, + }, + Runnable { + range: [46; 79), + kind: Test { + name: "test_foo1", + }, + }, + ] + "### + ); + } + + #[test] + fn test_runnables_multiple_depth_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod foo { + mod bar { + mod test_mod { + #[test] + fn test_foo1() {} + } + } + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_debug_snapshot!(&runnables, + @r###" + [ + Runnable { + range: [41; 115), + kind: TestMod { + path: "foo::bar::test_mod", + }, + }, + Runnable { + range: [68; 105), + kind: Test { + name: "test_foo1", + }, + }, + ] + "### + ); + } + + #[test] + fn test_runnables_no_test_function_in_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod test_mod { + fn foo1() {} + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert!(runnables.is_empty()) + } +} diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html new file mode 100644 index 000000000..b39c4d371 --- /dev/null +++ b/crates/ra_ide/src/snapshots/highlighting.html @@ -0,0 +1,48 @@ + + +
#[derive(Clone, Debug)]
+struct Foo {
+    pub x: i32,
+    pub y: i32,
+}
+
+fn foo<T>() -> T {
+    unimplemented!();
+    foo::<i32>();
+}
+
+// comment
+fn main() {
+    println!("Hello, {}!", 92);
+
+    let mut vec = Vec::new();
+    if true {
+        vec.push(Foo { x: 0, y: 1 });
+    }
+    unsafe { vec.set_len(0); }
+
+    let mut x = 42;
+    let y = &mut x;
+    let z = &y;
+
+    y;
+}
\ No newline at end of file diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html new file mode 100644 index 000000000..79f11ea80 --- /dev/null +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html @@ -0,0 +1,33 @@ + + +
fn main() {
+    let hello = "hello";
+    let x = hello.to_string();
+    let y = hello.to_string();
+
+    let x = "other color please!";
+    let y = x.to_string();
+}
+
+fn bar() {
+    let mut hello = "hello";
+}
\ No newline at end of file diff --git a/crates/ra_ide/src/source_change.rs b/crates/ra_ide/src/source_change.rs new file mode 100644 index 000000000..f5f7f8807 --- /dev/null +++ b/crates/ra_ide/src/source_change.rs @@ -0,0 +1,119 @@ +//! This modules defines type to represent changes to the source code, that flow +//! from the server to the client. +//! +//! It can be viewed as a dual for `AnalysisChange`. + +use ra_db::RelativePathBuf; +use ra_text_edit::TextEdit; + +use crate::{FileId, FilePosition, SourceRootId, TextUnit}; + +#[derive(Debug)] +pub struct SourceChange { + pub label: String, + pub source_file_edits: Vec, + pub file_system_edits: Vec, + pub cursor_position: Option, +} + +impl SourceChange { + /// Creates a new SourceChange with the given label + /// from the edits. + pub(crate) fn from_edits>( + label: L, + source_file_edits: Vec, + file_system_edits: Vec, + ) -> Self { + SourceChange { + label: label.into(), + source_file_edits, + file_system_edits, + cursor_position: None, + } + } + + /// Creates a new SourceChange with the given label, + /// containing only the given `SourceFileEdits`. + pub(crate) fn source_file_edits>(label: L, edits: Vec) -> Self { + SourceChange { + label: label.into(), + source_file_edits: edits, + file_system_edits: vec![], + cursor_position: None, + } + } + + /// Creates a new SourceChange with the given label, + /// containing only the given `FileSystemEdits`. + pub(crate) fn file_system_edits>(label: L, edits: Vec) -> Self { + SourceChange { + label: label.into(), + source_file_edits: vec![], + file_system_edits: edits, + cursor_position: None, + } + } + + /// Creates a new SourceChange with the given label, + /// containing only a single `SourceFileEdit`. + pub(crate) fn source_file_edit>(label: L, edit: SourceFileEdit) -> Self { + SourceChange::source_file_edits(label, vec![edit]) + } + + /// Creates a new SourceChange with the given label + /// from the given `FileId` and `TextEdit` + pub(crate) fn source_file_edit_from>( + label: L, + file_id: FileId, + edit: TextEdit, + ) -> Self { + SourceChange::source_file_edit(label, SourceFileEdit { file_id, edit }) + } + + /// Creates a new SourceChange with the given label + /// from the given `FileId` and `TextEdit` + pub(crate) fn file_system_edit>(label: L, edit: FileSystemEdit) -> Self { + SourceChange::file_system_edits(label, vec![edit]) + } + + /// Sets the cursor position to the given `FilePosition` + pub(crate) fn with_cursor(mut self, cursor_position: FilePosition) -> Self { + self.cursor_position = Some(cursor_position); + self + } + + /// Sets the cursor position to the given `FilePosition` + pub(crate) fn with_cursor_opt(mut self, cursor_position: Option) -> Self { + self.cursor_position = cursor_position; + self + } +} + +#[derive(Debug)] +pub struct SourceFileEdit { + pub file_id: FileId, + pub edit: TextEdit, +} + +#[derive(Debug)] +pub enum FileSystemEdit { + CreateFile { source_root: SourceRootId, path: RelativePathBuf }, + MoveFile { src: FileId, dst_source_root: SourceRootId, dst_path: RelativePathBuf }, +} + +pub(crate) struct SingleFileChange { + pub label: String, + pub edit: TextEdit, + pub cursor_position: Option, +} + +impl SingleFileChange { + pub(crate) fn into_source_change(self, file_id: FileId) -> SourceChange { + SourceChange { + label: self.label, + source_file_edits: vec![SourceFileEdit { file_id, edit: self.edit }], + file_system_edits: Vec::new(), + cursor_position: self.cursor_position.map(|offset| FilePosition { file_id, offset }), + } + } +} diff --git 
a/crates/ra_ide/src/status.rs b/crates/ra_ide/src/status.rs new file mode 100644 index 000000000..1bb27eb85 --- /dev/null +++ b/crates/ra_ide/src/status.rs @@ -0,0 +1,136 @@ +//! FIXME: write short doc here + +use std::{fmt, iter::FromIterator, sync::Arc}; + +use hir::MacroFile; +use ra_db::{ + salsa::{ + debug::{DebugQueryTable, TableEntry}, + Database, + }, + FileTextQuery, SourceRootId, +}; +use ra_prof::{memory_usage, Bytes}; +use ra_syntax::{ast, Parse, SyntaxNode}; + +use crate::{ + db::RootDatabase, + symbol_index::{LibrarySymbolsQuery, SymbolIndex}, + FileId, +}; + +fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { + db.query(ra_db::ParseQuery).entries::() +} +fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { + db.query(hir::db::ParseMacroQuery).entries::() +} + +pub(crate) fn status(db: &RootDatabase) -> String { + let files_stats = db.query(FileTextQuery).entries::(); + let syntax_tree_stats = syntax_tree_stats(db); + let macro_syntax_tree_stats = macro_syntax_tree_stats(db); + let symbols_stats = db.query(LibrarySymbolsQuery).entries::(); + format!( + "{}\n{}\n{}\n{} (macros)\n\n\nmemory:\n{}\ngc {:?} seconds ago", + files_stats, + symbols_stats, + syntax_tree_stats, + macro_syntax_tree_stats, + memory_usage(), + db.last_gc.elapsed().as_secs(), + ) +} + +#[derive(Default)] +struct FilesStats { + total: usize, + size: Bytes, +} + +impl fmt::Display for FilesStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} ({}) files", self.total, self.size) + } +} + +impl FromIterator>> for FilesStats { + fn from_iter(iter: T) -> FilesStats + where + T: IntoIterator>>, + { + let mut res = FilesStats::default(); + for entry in iter { + res.total += 1; + res.size += entry.value.unwrap().len(); + } + res + } +} + +#[derive(Default)] +pub(crate) struct SyntaxTreeStats { + total: usize, + pub(crate) retained: usize, +} + +impl fmt::Display for SyntaxTreeStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} trees, {} retained", self.total, self.retained) + } +} + +impl FromIterator>> for SyntaxTreeStats { + fn from_iter(iter: T) -> SyntaxTreeStats + where + T: IntoIterator>>, + { + let mut res = SyntaxTreeStats::default(); + for entry in iter { + res.total += 1; + res.retained += entry.value.is_some() as usize; + } + res + } +} + +impl FromIterator, M)>>> for SyntaxTreeStats { + fn from_iter(iter: T) -> SyntaxTreeStats + where + T: IntoIterator, M)>>>, + { + let mut res = SyntaxTreeStats::default(); + for entry in iter { + res.total += 1; + res.retained += entry.value.is_some() as usize; + } + res + } +} + +#[derive(Default)] +struct LibrarySymbolsStats { + total: usize, + size: Bytes, +} + +impl fmt::Display for LibrarySymbolsStats { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "{} ({}) symbols", self.total, self.size,) + } +} + +impl FromIterator>> for LibrarySymbolsStats { + fn from_iter(iter: T) -> LibrarySymbolsStats + where + T: IntoIterator>>, + { + let mut res = LibrarySymbolsStats::default(); + for entry in iter { + let value = entry.value.unwrap(); + res.total += value.len(); + res.size += value.memory_size(); + } + res + } +} diff --git a/crates/ra_ide/src/symbol_index.rs b/crates/ra_ide/src/symbol_index.rs new file mode 100644 index 000000000..5729eb5b3 --- /dev/null +++ b/crates/ra_ide/src/symbol_index.rs @@ -0,0 +1,405 @@ +//! This module handles fuzzy-searching of functions, structs and other symbols +//! by name across the whole workspace and dependencies. +//! +//! 
It works by building an incrementally-updated text-search index of all +//! symbols. The backbone of the index is the **awesome** `fst` crate by +//! @BurntSushi. +//! +//! In a nutshell, you give a set of strings to `fst`, and it builds a +//! finite state machine describing this set of strings. The strings which +//! could fuzzy-match a pattern can also be described by a finite state machine. +//! What is freaking cool is that you can now traverse both state machines in +//! lock-step to enumerate the strings which are both in the input set and +//! fuzz-match the query. Or, more formally, given two languages described by +//! FSTs, one can build a product FST which describes the intersection of the +//! languages. +//! +//! `fst` does not support cheap updating of the index, but it supports unioning +//! of state machines. So, to account for changing source code, we build an FST +//! for each library (which is assumed to never change) and an FST for each Rust +//! file in the current workspace, and run a query against the union of all +//! those FSTs. +use std::{ + fmt, + hash::{Hash, Hasher}, + mem, + sync::Arc, +}; + +use fst::{self, Streamer}; +use ra_db::{ + salsa::{self, ParallelDatabase}, + SourceDatabaseExt, SourceRootId, +}; +use ra_syntax::{ + ast::{self, NameOwner}, + match_ast, AstNode, Parse, SmolStr, SourceFile, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, +}; +#[cfg(not(feature = "wasm"))] +use rayon::prelude::*; + +use crate::{db::RootDatabase, FileId, Query}; + +#[salsa::query_group(SymbolsDatabaseStorage)] +pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { + fn file_symbols(&self, file_id: FileId) -> Arc; + #[salsa::input] + fn library_symbols(&self, id: SourceRootId) -> Arc; + /// The set of "local" (that is, from the current workspace) roots. + /// Files in local roots are assumed to change frequently. + #[salsa::input] + fn local_roots(&self) -> Arc>; + /// The set of roots for crates.io libraries. + /// Files in libraries are assumed to never change. 
+ #[salsa::input] + fn library_roots(&self) -> Arc>; +} + +fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc { + db.check_canceled(); + let parse = db.parse(file_id); + + let symbols = source_file_to_file_symbols(&parse.tree(), file_id); + + // FIXME: add macros here + + Arc::new(SymbolIndex::new(symbols)) +} + +pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Vec { + /// Need to wrap Snapshot to provide `Clone` impl for `map_with` + struct Snap(salsa::Snapshot); + impl Clone for Snap { + fn clone(&self) -> Snap { + Snap(self.0.snapshot()) + } + } + + let buf: Vec> = if query.libs { + let snap = Snap(db.snapshot()); + #[cfg(not(feature = "wasm"))] + let buf = db + .library_roots() + .par_iter() + .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) + .collect(); + + #[cfg(feature = "wasm")] + let buf = db.library_roots().iter().map(|&lib_id| snap.0.library_symbols(lib_id)).collect(); + + buf + } else { + let mut files = Vec::new(); + for &root in db.local_roots().iter() { + let sr = db.source_root(root); + files.extend(sr.walk()) + } + + let snap = Snap(db.snapshot()); + #[cfg(not(feature = "wasm"))] + let buf = + files.par_iter().map_with(snap, |db, &file_id| db.0.file_symbols(file_id)).collect(); + + #[cfg(feature = "wasm")] + let buf = files.iter().map(|&file_id| snap.0.file_symbols(file_id)).collect(); + + buf + }; + query.search(&buf) +} + +pub(crate) fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec { + let name = name_ref.text(); + let mut query = Query::new(name.to_string()); + query.exact(); + query.limit(4); + crate::symbol_index::world_symbols(db, query) +} + +#[derive(Default)] +pub(crate) struct SymbolIndex { + symbols: Vec, + map: fst::Map, +} + +impl fmt::Debug for SymbolIndex { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish() + } +} + +impl PartialEq for SymbolIndex { + fn eq(&self, other: &SymbolIndex) -> bool { + self.symbols == other.symbols + } +} + +impl Eq for SymbolIndex {} + +impl Hash for SymbolIndex { + fn hash(&self, hasher: &mut H) { + self.symbols.hash(hasher) + } +} + +impl SymbolIndex { + fn new(mut symbols: Vec) -> SymbolIndex { + fn cmp_key<'a>(s1: &'a FileSymbol) -> impl Ord + 'a { + unicase::Ascii::new(s1.name.as_str()) + } + #[cfg(not(feature = "wasm"))] + symbols.par_sort_by(|s1, s2| cmp_key(s1).cmp(&cmp_key(s2))); + + #[cfg(feature = "wasm")] + symbols.sort_by(|s1, s2| cmp_key(s1).cmp(&cmp_key(s2))); + + let mut builder = fst::MapBuilder::memory(); + + let mut last_batch_start = 0; + + for idx in 0..symbols.len() { + if symbols.get(last_batch_start).map(cmp_key) == symbols.get(idx + 1).map(cmp_key) { + continue; + } + + let start = last_batch_start; + let end = idx + 1; + last_batch_start = end; + + let key = symbols[start].name.as_str().to_lowercase(); + let value = SymbolIndex::range_to_map_value(start, end); + + builder.insert(key, value).unwrap(); + } + + let map = fst::Map::from_bytes(builder.into_inner().unwrap()).unwrap(); + SymbolIndex { symbols, map } + } + + pub(crate) fn len(&self) -> usize { + self.symbols.len() + } + + pub(crate) fn memory_size(&self) -> usize { + self.map.as_fst().size() + self.symbols.len() * mem::size_of::() + } + + #[cfg(not(feature = "wasm"))] + pub(crate) fn for_files( + files: impl ParallelIterator)>, + ) -> SymbolIndex { + let symbols = files + .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) + .collect::>(); + SymbolIndex::new(symbols) + } 
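As a rough, standalone illustration of the build-then-intersect `fst` workflow the module doc above describes (not part of this patch; it reuses the same fst calls this file relies on — `MapBuilder::memory`, `Map::from_bytes`, `automaton::Subsequence`, `map::OpBuilder` — with invented keys and payloads):

use fst::{automaton::Subsequence, Streamer};

fn main() {
    // Keys must be inserted in ascending order; the u64 payload is arbitrary
    // (the real index packs a `symbols[start..end]` slice into it).
    let mut builder = fst::MapBuilder::memory();
    builder.insert("file_symbols", 0).unwrap();
    builder.insert("world_symbols", 1).unwrap();
    let map = fst::Map::from_bytes(builder.into_inner().unwrap()).unwrap();

    // A `Subsequence` automaton accepts every key that contains the query's
    // characters in order -- the fuzzy matching described in the module doc.
    // Several maps could be `add`ed here; their union is searched in lock-step.
    let mut op = fst::map::OpBuilder::new();
    op = op.add(map.search(Subsequence::new("wsym")));

    let mut stream = op.union();
    while let Some((key, indexed_values)) = stream.next() {
        // Prints: world_symbols => 1
        println!("{} => {}", String::from_utf8_lossy(key), indexed_values[0].value);
    }
}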
+ + #[cfg(feature = "wasm")] + pub(crate) fn for_files( + files: impl Iterator)>, + ) -> SymbolIndex { + let symbols = files + .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) + .collect::>(); + SymbolIndex::new(symbols) + } + + fn range_to_map_value(start: usize, end: usize) -> u64 { + debug_assert![start <= (std::u32::MAX as usize)]; + debug_assert![end <= (std::u32::MAX as usize)]; + + ((start as u64) << 32) | end as u64 + } + + fn map_value_to_range(value: u64) -> (usize, usize) { + let end = value as u32 as usize; + let start = (value >> 32) as usize; + (start, end) + } +} + +impl Query { + pub(crate) fn search(self, indices: &[Arc]) -> Vec { + let mut op = fst::map::OpBuilder::new(); + for file_symbols in indices.iter() { + let automaton = fst::automaton::Subsequence::new(&self.lowercased); + op = op.add(file_symbols.map.search(automaton)) + } + let mut stream = op.union(); + let mut res = Vec::new(); + while let Some((_, indexed_values)) = stream.next() { + if res.len() >= self.limit { + break; + } + for indexed_value in indexed_values { + let symbol_index = &indices[indexed_value.index]; + let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); + + for symbol in &symbol_index.symbols[start..end] { + if self.only_types && !is_type(symbol.ptr.kind()) { + continue; + } + if self.exact && symbol.name != self.query { + continue; + } + res.push(symbol.clone()); + } + } + } + res + } +} + +fn is_type(kind: SyntaxKind) -> bool { + match kind { + STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => true, + _ => false, + } +} + +/// The actual data that is stored in the index. It should be as compact as +/// possible. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct FileSymbol { + pub(crate) file_id: FileId, + pub(crate) name: SmolStr, + pub(crate) ptr: SyntaxNodePtr, + pub(crate) name_range: Option, + pub(crate) container_name: Option, +} + +fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec { + let mut symbols = Vec::new(); + let mut stack = Vec::new(); + + for event in source_file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = to_file_symbol(&node, file_id) { + symbol.container_name = stack.last().cloned(); + + stack.push(symbol.name.clone()); + symbols.push(symbol); + } + } + + WalkEvent::Leave(node) => { + if to_symbol(&node).is_some() { + stack.pop(); + } + } + } + } + + symbols +} + +fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + fn decl(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + let name = node.name()?; + let name_range = name.syntax().text_range(); + let name = name.text().clone(); + let ptr = SyntaxNodePtr::new(node.syntax()); + + Some((name, ptr, name_range)) + } + match_ast! 
{ + match node { + ast::FnDef(it) => { decl(it) }, + ast::StructDef(it) => { decl(it) }, + ast::EnumDef(it) => { decl(it) }, + ast::TraitDef(it) => { decl(it) }, + ast::Module(it) => { decl(it) }, + ast::TypeAliasDef(it) => { decl(it) }, + ast::ConstDef(it) => { decl(it) }, + ast::StaticDef(it) => { decl(it) }, + _ => None, + } + } +} + +fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option { + to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { + name, + ptr, + file_id, + name_range: Some(name_range), + container_name: None, + }) +} + +#[cfg(test)] +mod tests { + use crate::{display::NavigationTarget, mock_analysis::single_file, Query}; + use ra_syntax::{ + SmolStr, + SyntaxKind::{FN_DEF, STRUCT_DEF}, + }; + + #[test] + fn test_world_symbols_with_no_container() { + let code = r#" + enum FooInner { } + "#; + + let mut symbols = get_symbols_matching(code, "FooInner"); + + let s = symbols.pop().unwrap(); + + assert_eq!(s.name(), "FooInner"); + assert!(s.container_name().is_none()); + } + + #[test] + fn test_world_symbols_include_container_name() { + let code = r#" +fn foo() { + enum FooInner { } +} + "#; + + let mut symbols = get_symbols_matching(code, "FooInner"); + + let s = symbols.pop().unwrap(); + + assert_eq!(s.name(), "FooInner"); + assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); + + let code = r#" +mod foo { + struct FooInner; +} + "#; + + let mut symbols = get_symbols_matching(code, "FooInner"); + + let s = symbols.pop().unwrap(); + + assert_eq!(s.name(), "FooInner"); + assert_eq!(s.container_name(), Some(&SmolStr::new("foo"))); + } + + #[test] + fn test_world_symbols_are_case_sensitive() { + let code = r#" +fn foo() {} + +struct Foo; + "#; + + let symbols = get_symbols_matching(code, "Foo"); + + let fn_match = symbols.iter().find(|s| s.name() == "foo").map(|s| s.kind()); + let struct_match = symbols.iter().find(|s| s.name() == "Foo").map(|s| s.kind()); + + assert_eq!(fn_match, Some(FN_DEF)); + assert_eq!(struct_match, Some(STRUCT_DEF)); + } + + fn get_symbols_matching(text: &str, query: &str) -> Vec { + let (analysis, _) = single_file(text); + analysis.symbol_search(Query::new(query.into())).unwrap() + } +} diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs new file mode 100644 index 000000000..2c568a747 --- /dev/null +++ b/crates/ra_ide/src/syntax_highlighting.rs @@ -0,0 +1,342 @@ +//! 
FIXME: write short doc here + +use rustc_hash::{FxHashMap, FxHashSet}; + +use hir::{Name, Source}; +use ra_db::SourceDatabase; +use ra_prof::profile; +use ra_syntax::{ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, TextRange, T}; + +use crate::{ + db::RootDatabase, + references::{ + classify_name, classify_name_ref, + NameKind::{self, *}, + }, + FileId, +}; + +#[derive(Debug)] +pub struct HighlightedRange { + pub range: TextRange, + pub tag: &'static str, + pub binding_hash: Option, +} + +fn is_control_keyword(kind: SyntaxKind) -> bool { + match kind { + T![for] + | T![loop] + | T![while] + | T![continue] + | T![break] + | T![if] + | T![else] + | T![match] + | T![return] => true, + _ => false, + } +} + +pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec { + let _p = profile("highlight"); + let parse = db.parse(file_id); + let root = parse.tree().syntax().clone(); + + fn calc_binding_hash(file_id: FileId, name: &Name, shadow_count: u32) -> u64 { + fn hash(x: T) -> u64 { + use std::{collections::hash_map::DefaultHasher, hash::Hasher}; + + let mut hasher = DefaultHasher::new(); + x.hash(&mut hasher); + hasher.finish() + } + + hash((file_id, name, shadow_count)) + } + + // Visited nodes to handle highlighting priorities + // FIXME: retain only ranges here + let mut highlighted: FxHashSet = FxHashSet::default(); + let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); + + let mut res = Vec::new(); + for node in root.descendants_with_tokens() { + if highlighted.contains(&node) { + continue; + } + let mut binding_hash = None; + let tag = match node.kind() { + FN_DEF => { + bindings_shadow_count.clear(); + continue; + } + COMMENT => "comment", + STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string", + ATTR => "attribute", + NAME_REF => { + if node.ancestors().any(|it| it.kind() == ATTR) { + continue; + } + + let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); + let name_kind = + classify_name_ref(db, Source::new(file_id.into(), &name_ref)).map(|d| d.kind); + + if let Some(Local(local)) = &name_kind { + if let Some(name) = local.name(db) { + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + binding_hash = Some(calc_binding_hash(file_id, &name, *shadow_count)) + } + }; + + name_kind.map_or("text", |it| highlight_name(db, it)) + } + NAME => { + let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap(); + let name_kind = + classify_name(db, Source::new(file_id.into(), &name)).map(|d| d.kind); + + if let Some(Local(local)) = &name_kind { + if let Some(name) = local.name(db) { + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *shadow_count += 1; + binding_hash = Some(calc_binding_hash(file_id, &name, *shadow_count)) + } + }; + + match name_kind { + Some(name_kind) => highlight_name(db, name_kind), + None => name.syntax().parent().map_or("function", |x| match x.kind() { + TYPE_PARAM | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => "type", + RECORD_FIELD_DEF => "field", + _ => "function", + }), + } + } + INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", + LIFETIME => "parameter", + T![unsafe] => "keyword.unsafe", + k if is_control_keyword(k) => "keyword.control", + k if k.is_keyword() => "keyword", + _ => { + if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) { + if let Some(path) = macro_call.path() { + if let Some(segment) = path.segment() { + if let Some(name_ref) = segment.name_ref() { + 
highlighted.insert(name_ref.syntax().clone().into()); + let range_start = name_ref.syntax().text_range().start(); + let mut range_end = name_ref.syntax().text_range().end(); + for sibling in path.syntax().siblings_with_tokens(Direction::Next) { + match sibling.kind() { + T![!] | IDENT => range_end = sibling.text_range().end(), + _ => (), + } + } + res.push(HighlightedRange { + range: TextRange::from_to(range_start, range_end), + tag: "macro", + binding_hash: None, + }) + } + } + } + } + continue; + } + }; + res.push(HighlightedRange { range: node.text_range(), tag, binding_hash }) + } + res +} + +pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { + let parse = db.parse(file_id); + + fn rainbowify(seed: u64) -> String { + use rand::prelude::*; + let mut rng = SmallRng::seed_from_u64(seed); + format!( + "hsl({h},{s}%,{l}%)", + h = rng.gen_range::(0, 361), + s = rng.gen_range::(42, 99), + l = rng.gen_range::(40, 91), + ) + } + + let mut ranges = highlight(db, file_id); + ranges.sort_by_key(|it| it.range.start()); + // quick non-optimal heuristic to intersect token ranges and highlighted ranges + let mut frontier = 0; + let mut could_intersect: Vec<&HighlightedRange> = Vec::new(); + + let mut buf = String::new(); + buf.push_str(&STYLE); + buf.push_str("
");
+    let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
+    for token in tokens {
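+        // Sweep-line intersection: `ranges` is sorted by start offset, so first drop the
+        // highlighted ranges that ended before this token starts, then pull in (advancing
+        // `frontier`) every range that starts before this token ends.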
+        could_intersect.retain(|it| token.text_range().start() <= it.range.end());
+        while let Some(r) = ranges.get(frontier) {
+            if r.range.start() <= token.text_range().end() {
+                could_intersect.push(r);
+                frontier += 1;
+            } else {
+                break;
+            }
+        }
+        let text = html_escape(&token.text());
+        let ranges = could_intersect
+            .iter()
+            .filter(|it| token.text_range().is_subrange(&it.range))
+            .collect::<Vec<_>>();
+        if ranges.is_empty() {
+            buf.push_str(&text);
+        } else {
+            let classes = ranges.iter().map(|x| x.tag).collect::<Vec<_>>().join(" ");
+            let binding_hash = ranges.first().and_then(|x| x.binding_hash);
+            let color = match (rainbow, binding_hash) {
+                (true, Some(hash)) => format!(
+                    " data-binding-hash=\"{}\" style=\"color: {};\"",
+                    hash,
+                    rainbowify(hash)
+                ),
+                _ => "".into(),
+            };
+            buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
+        }
+    }
+    buf.push_str("
"); + buf +} + +fn highlight_name(db: &RootDatabase, name_kind: NameKind) -> &'static str { + match name_kind { + Macro(_) => "macro", + Field(_) => "field", + AssocItem(hir::AssocItem::Function(_)) => "function", + AssocItem(hir::AssocItem::Const(_)) => "constant", + AssocItem(hir::AssocItem::TypeAlias(_)) => "type", + Def(hir::ModuleDef::Module(_)) => "module", + Def(hir::ModuleDef::Function(_)) => "function", + Def(hir::ModuleDef::Adt(_)) => "type", + Def(hir::ModuleDef::EnumVariant(_)) => "constant", + Def(hir::ModuleDef::Const(_)) => "constant", + Def(hir::ModuleDef::Static(_)) => "constant", + Def(hir::ModuleDef::Trait(_)) => "type", + Def(hir::ModuleDef::TypeAlias(_)) => "type", + Def(hir::ModuleDef::BuiltinType(_)) => "type", + SelfType(_) => "type", + GenericParam(_) => "type", + Local(local) => { + if local.is_mut(db) { + "variable.mut" + } else if local.ty(db).is_mutable_reference() { + "variable.mut" + } else { + "variable" + } + } + } +} + +//FIXME: like, real html escaping +fn html_escape(text: &str) -> String { + text.replace("<", "<").replace(">", ">") +} + +const STYLE: &str = " + +"; + +#[cfg(test)] +mod tests { + use crate::mock_analysis::single_file; + use test_utils::{assert_eq_text, project_dir, read_text}; + + #[test] + fn test_highlighting() { + let (analysis, file_id) = single_file( + r#" +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} + +fn foo() -> T { + unimplemented!(); + foo::(); +} + +// comment +fn main() { + println!("Hello, {}!", 92); + + let mut vec = Vec::new(); + if true { + vec.push(Foo { x: 0, y: 1 }); + } + unsafe { vec.set_len(0); } + + let mut x = 42; + let y = &mut x; + let z = &y; + + y; +} +"# + .trim(), + ); + let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlighting.html"); + let actual_html = &analysis.highlight_as_html(file_id, false).unwrap(); + let expected_html = &read_text(&dst_file); + std::fs::write(dst_file, &actual_html).unwrap(); + assert_eq_text!(expected_html, actual_html); + } + + #[test] + fn test_rainbow_highlighting() { + let (analysis, file_id) = single_file( + r#" +fn main() { + let hello = "hello"; + let x = hello.to_string(); + let y = hello.to_string(); + + let x = "other color please!"; + let y = x.to_string(); +} + +fn bar() { + let mut hello = "hello"; +} +"# + .trim(), + ); + let dst_file = + project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html"); + let actual_html = &analysis.highlight_as_html(file_id, true).unwrap(); + let expected_html = &read_text(&dst_file); + std::fs::write(dst_file, &actual_html).unwrap(); + assert_eq_text!(expected_html, actual_html); + } +} diff --git a/crates/ra_ide/src/syntax_tree.rs b/crates/ra_ide/src/syntax_tree.rs new file mode 100644 index 000000000..4d0f0fc47 --- /dev/null +++ b/crates/ra_ide/src/syntax_tree.rs @@ -0,0 +1,359 @@ +//! 
FIXME: write short doc here + +use crate::db::RootDatabase; +use ra_db::SourceDatabase; +use ra_syntax::{ + algo, AstNode, NodeOrToken, SourceFile, + SyntaxKind::{RAW_STRING, STRING}, + SyntaxToken, TextRange, +}; + +pub use ra_db::FileId; + +pub(crate) fn syntax_tree( + db: &RootDatabase, + file_id: FileId, + text_range: Option, +) -> String { + let parse = db.parse(file_id); + if let Some(text_range) = text_range { + let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => { + if let Some(tree) = syntax_tree_for_string(&token, text_range) { + return tree; + } + token.parent() + } + }; + + format!("{:#?}", node) + } else { + format!("{:#?}", parse.tree().syntax()) + } +} + +/// Attempts parsing the selected contents of a string literal +/// as rust syntax and returns its syntax tree +fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option { + // When the range is inside a string + // we'll attempt parsing it as rust syntax + // to provide the syntax tree of the contents of the string + match token.kind() { + STRING | RAW_STRING => syntax_tree_for_token(token, text_range), + _ => None, + } +} + +fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option { + // Range of the full node + let node_range = node.text_range(); + let text = node.text().to_string(); + + // We start at some point inside the node + // Either we have selected the whole string + // or our selection is inside it + let start = text_range.start() - node_range.start(); + + // how many characters we have selected + let len = text_range.len().to_usize(); + + let node_len = node_range.len().to_usize(); + + let start = start.to_usize(); + + // We want to cap our length + let len = len.min(node_len); + + // Ensure our slice is inside the actual string + let end = if start + len < text.len() { start + len } else { text.len() - start }; + + let text = &text[start..end]; + + // Remove possible extra string quotes from the start + // and the end of the string + let text = text + .trim_start_matches('r') + .trim_start_matches('#') + .trim_start_matches('"') + .trim_end_matches('#') + .trim_end_matches('"') + .trim() + // Remove custom markers + .replace("<|>", ""); + + let parsed = SourceFile::parse(&text); + + // If the "file" parsed without errors, + // return its syntax + if parsed.errors().is_empty() { + return Some(format!("{:#?}", parsed.tree().syntax())); + } + + None +} + +#[cfg(test)] +mod tests { + use test_utils::assert_eq_text; + + use crate::mock_analysis::{single_file, single_file_with_range}; + + #[test] + fn test_syntax_tree_without_range() { + // Basic syntax + let (analysis, file_id) = single_file(r#"fn foo() {}"#); + let syn = analysis.syntax_tree(file_id, None).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@[0; 11) + FN_DEF@[0; 11) + FN_KW@[0; 2) "fn" + WHITESPACE@[2; 3) " " + NAME@[3; 6) + IDENT@[3; 6) "foo" + PARAM_LIST@[6; 8) + L_PAREN@[6; 7) "(" + R_PAREN@[7; 8) ")" + WHITESPACE@[8; 9) " " + BLOCK_EXPR@[9; 11) + BLOCK@[9; 11) + L_CURLY@[9; 10) "{" + R_CURLY@[10; 11) "}" +"# + .trim() + ); + + let (analysis, file_id) = single_file( + r#" +fn test() { + assert!(" + fn foo() { + } + ", ""); +}"# + .trim(), + ); + let syn = analysis.syntax_tree(file_id, None).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@[0; 60) + FN_DEF@[0; 60) + FN_KW@[0; 2) "fn" + WHITESPACE@[2; 3) " " + NAME@[3; 7) + IDENT@[3; 7) "test" + PARAM_LIST@[7; 9) + L_PAREN@[7; 8) "(" 
+ R_PAREN@[8; 9) ")" + WHITESPACE@[9; 10) " " + BLOCK_EXPR@[10; 60) + BLOCK@[10; 60) + L_CURLY@[10; 11) "{" + WHITESPACE@[11; 16) "\n " + EXPR_STMT@[16; 58) + MACRO_CALL@[16; 57) + PATH@[16; 22) + PATH_SEGMENT@[16; 22) + NAME_REF@[16; 22) + IDENT@[16; 22) "assert" + EXCL@[22; 23) "!" + TOKEN_TREE@[23; 57) + L_PAREN@[23; 24) "(" + STRING@[24; 52) "\"\n fn foo() {\n ..." + COMMA@[52; 53) "," + WHITESPACE@[53; 54) " " + STRING@[54; 56) "\"\"" + R_PAREN@[56; 57) ")" + SEMI@[57; 58) ";" + WHITESPACE@[58; 59) "\n" + R_CURLY@[59; 60) "}" +"# + .trim() + ); + } + + #[test] + fn test_syntax_tree_with_range() { + let (analysis, range) = single_file_with_range(r#"<|>fn foo() {}<|>"#.trim()); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +FN_DEF@[0; 11) + FN_KW@[0; 2) "fn" + WHITESPACE@[2; 3) " " + NAME@[3; 6) + IDENT@[3; 6) "foo" + PARAM_LIST@[6; 8) + L_PAREN@[6; 7) "(" + R_PAREN@[7; 8) ")" + WHITESPACE@[8; 9) " " + BLOCK_EXPR@[9; 11) + BLOCK@[9; 11) + L_CURLY@[9; 10) "{" + R_CURLY@[10; 11) "}" +"# + .trim() + ); + + let (analysis, range) = single_file_with_range( + r#"fn test() { + <|>assert!(" + fn foo() { + } + ", "");<|> +}"# + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + + assert_eq_text!( + syn.trim(), + r#" +EXPR_STMT@[16; 58) + MACRO_CALL@[16; 57) + PATH@[16; 22) + PATH_SEGMENT@[16; 22) + NAME_REF@[16; 22) + IDENT@[16; 22) "assert" + EXCL@[22; 23) "!" + TOKEN_TREE@[23; 57) + L_PAREN@[23; 24) "(" + STRING@[24; 52) "\"\n fn foo() {\n ..." + COMMA@[52; 53) "," + WHITESPACE@[53; 54) " " + STRING@[54; 56) "\"\"" + R_PAREN@[56; 57) ")" + SEMI@[57; 58) ";" +"# + .trim() + ); + } + + #[test] + fn test_syntax_tree_inside_string() { + let (analysis, range) = single_file_with_range( + r#"fn test() { + assert!(" +<|>fn foo() { +}<|> +fn bar() { +} + ", ""); +}"# + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@[0; 12) + FN_DEF@[0; 12) + FN_KW@[0; 2) "fn" + WHITESPACE@[2; 3) " " + NAME@[3; 6) + IDENT@[3; 6) "foo" + PARAM_LIST@[6; 8) + L_PAREN@[6; 7) "(" + R_PAREN@[7; 8) ")" + WHITESPACE@[8; 9) " " + BLOCK_EXPR@[9; 12) + BLOCK@[9; 12) + L_CURLY@[9; 10) "{" + WHITESPACE@[10; 11) "\n" + R_CURLY@[11; 12) "}" +"# + .trim() + ); + + // With a raw string + let (analysis, range) = single_file_with_range( + r###"fn test() { + assert!(r#" +<|>fn foo() { +}<|> +fn bar() { +} + "#, ""); +}"### + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@[0; 12) + FN_DEF@[0; 12) + FN_KW@[0; 2) "fn" + WHITESPACE@[2; 3) " " + NAME@[3; 6) + IDENT@[3; 6) "foo" + PARAM_LIST@[6; 8) + L_PAREN@[6; 7) "(" + R_PAREN@[7; 8) ")" + WHITESPACE@[8; 9) " " + BLOCK_EXPR@[9; 12) + BLOCK@[9; 12) + L_CURLY@[9; 10) "{" + WHITESPACE@[10; 11) "\n" + R_CURLY@[11; 12) "}" +"# + .trim() + ); + + // With a raw string + let (analysis, range) = single_file_with_range( + r###"fn test() { + assert!(r<|>#" +fn foo() { +} +fn bar() { +}"<|>#, ""); +}"### + .trim(), + ); + let syn = analysis.syntax_tree(range.file_id, Some(range.range)).unwrap(); + assert_eq_text!( + syn.trim(), + r#" +SOURCE_FILE@[0; 25) + FN_DEF@[0; 12) + FN_KW@[0; 2) "fn" + WHITESPACE@[2; 3) " " + NAME@[3; 6) + IDENT@[3; 6) "foo" + PARAM_LIST@[6; 8) + L_PAREN@[6; 7) "(" + R_PAREN@[7; 8) ")" + WHITESPACE@[8; 9) " " + BLOCK_EXPR@[9; 12) + BLOCK@[9; 12) + L_CURLY@[9; 10) "{" + 
WHITESPACE@[10; 11) "\n" + R_CURLY@[11; 12) "}" + WHITESPACE@[12; 13) "\n" + FN_DEF@[13; 25) + FN_KW@[13; 15) "fn" + WHITESPACE@[15; 16) " " + NAME@[16; 19) + IDENT@[16; 19) "bar" + PARAM_LIST@[19; 21) + L_PAREN@[19; 20) "(" + R_PAREN@[20; 21) ")" + WHITESPACE@[21; 22) " " + BLOCK_EXPR@[22; 25) + BLOCK@[22; 25) + L_CURLY@[22; 23) "{" + WHITESPACE@[23; 24) "\n" + R_CURLY@[24; 25) "}" +"# + .trim() + ); + } +} diff --git a/crates/ra_ide/src/test_utils.rs b/crates/ra_ide/src/test_utils.rs new file mode 100644 index 000000000..8adb214d4 --- /dev/null +++ b/crates/ra_ide/src/test_utils.rs @@ -0,0 +1,21 @@ +//! FIXME: write short doc here + +use ra_syntax::{SourceFile, TextUnit}; +use ra_text_edit::TextEdit; + +pub use test_utils::*; + +pub fn check_action Option>( + before: &str, + after: &str, + f: F, +) { + let (before_cursor_pos, before) = extract_offset(before); + let file = SourceFile::parse(&before).ok().unwrap(); + let result = f(&file, before_cursor_pos).expect("code action is not applicable"); + let actual = result.apply(&before); + let actual_cursor_pos = + result.apply_to_offset(before_cursor_pos).expect("cursor position is affected by the edit"); + let actual = add_cursor(&actual, actual_cursor_pos); + assert_eq_text!(after, &actual); +} diff --git a/crates/ra_ide/src/typing.rs b/crates/ra_ide/src/typing.rs new file mode 100644 index 000000000..21e5be9b3 --- /dev/null +++ b/crates/ra_ide/src/typing.rs @@ -0,0 +1,490 @@ +//! This module handles auto-magic editing actions applied together with users +//! edits. For example, if the user typed +//! +//! ```text +//! foo +//! .bar() +//! .baz() +//! | // <- cursor is here +//! ``` +//! +//! and types `.` next, we want to indent the dot. +//! +//! Language server executes such typing assists synchronously. That is, they +//! block user's typing and should be pretty fast for this reason! 
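For a concrete picture of that flow, here is a hypothetical helper (modeled on the `do_type_char` test helper further down in this file): apply the keystroke itself, re-parse, then apply whatever follow-up edit the assist proposes.

use ra_syntax::{SourceFile, TextUnit};
use ra_text_edit::TextEdit;

// `on_char_typed_inner` refers to the crate-internal entry point defined below.
fn simulate_typing(before: &str, offset: TextUnit, char_typed: char) -> String {
    // 1. Apply the keystroke itself.
    let keystroke = TextEdit::insert(offset, char_typed.to_string());
    let text = keystroke.apply(before);
    // 2. Re-parse and ask the assist for a follow-up edit (if any).
    let parse = SourceFile::parse(&text);
    match on_char_typed_inner(&parse.tree(), offset, char_typed) {
        Some(change) => change.edit.apply(&text),
        None => text,
    }
}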
+ +use ra_db::{FilePosition, SourceDatabase}; +use ra_fmt::leading_indent; +use ra_syntax::{ + algo::find_node_at_offset, + ast::{self, AstToken}, + AstNode, SmolStr, SourceFile, + SyntaxKind::*, + SyntaxToken, TextRange, TextUnit, TokenAtOffset, +}; +use ra_text_edit::TextEdit; + +use crate::{db::RootDatabase, source_change::SingleFileChange, SourceChange, SourceFileEdit}; + +pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option { + let parse = db.parse(position.file_id); + let file = parse.tree(); + let comment = file + .syntax() + .token_at_offset(position.offset) + .left_biased() + .and_then(ast::Comment::cast)?; + + if comment.kind().shape.is_block() { + return None; + } + + let prefix = comment.prefix(); + let comment_range = comment.syntax().text_range(); + if position.offset < comment_range.start() + TextUnit::of_str(prefix) + TextUnit::from(1) { + return None; + } + + // Continuing non-doc line comments (like this one :) ) is annoying + if prefix == "//" && comment_range.end() == position.offset { + return None; + } + + let indent = node_indent(&file, comment.syntax())?; + let inserted = format!("\n{}{} ", indent, prefix); + let cursor_position = position.offset + TextUnit::of_str(&inserted); + let edit = TextEdit::insert(position.offset, inserted); + + Some( + SourceChange::source_file_edit( + "on enter", + SourceFileEdit { edit, file_id: position.file_id }, + ) + .with_cursor(FilePosition { offset: cursor_position, file_id: position.file_id }), + ) +} + +fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option { + let ws = match file.syntax().token_at_offset(token.text_range().start()) { + TokenAtOffset::Between(l, r) => { + assert!(r == *token); + l + } + TokenAtOffset::Single(n) => { + assert!(n == *token); + return Some("".into()); + } + TokenAtOffset::None => unreachable!(), + }; + if ws.kind() != WHITESPACE { + return None; + } + let text = ws.text(); + let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); + Some(text[pos..].into()) +} + +pub(crate) const TRIGGER_CHARS: &str = ".=>"; + +pub(crate) fn on_char_typed( + db: &RootDatabase, + position: FilePosition, + char_typed: char, +) -> Option { + assert!(TRIGGER_CHARS.contains(char_typed)); + let file = &db.parse(position.file_id).tree(); + assert_eq!(file.syntax().text().char_at(position.offset), Some(char_typed)); + let single_file_change = on_char_typed_inner(file, position.offset, char_typed)?; + Some(single_file_change.into_source_change(position.file_id)) +} + +fn on_char_typed_inner( + file: &SourceFile, + offset: TextUnit, + char_typed: char, +) -> Option { + assert!(TRIGGER_CHARS.contains(char_typed)); + match char_typed { + '.' => on_dot_typed(file, offset), + '=' => on_eq_typed(file, offset), + '>' => on_arrow_typed(file, offset), + _ => unreachable!(), + } +} + +/// Returns an edit which should be applied after `=` was typed. Primarily, +/// this works when adding `let =`. +// FIXME: use a snippet completion instead of this hack here. 
+fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option { + assert_eq!(file.syntax().text().char_at(offset), Some('=')); + let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; + if let_stmt.has_semi() { + return None; + } + if let Some(expr) = let_stmt.initializer() { + let expr_range = expr.syntax().text_range(); + if expr_range.contains(offset) && offset != expr_range.start() { + return None; + } + if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') { + return None; + } + } else { + return None; + } + let offset = let_stmt.syntax().text_range().end(); + Some(SingleFileChange { + label: "add semicolon".to_string(), + edit: TextEdit::insert(offset, ";".to_string()), + cursor_position: None, + }) +} + +/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. +fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option { + assert_eq!(file.syntax().text().char_at(offset), Some('.')); + let whitespace = + file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?; + + let current_indent = { + let text = whitespace.text(); + let newline = text.rfind('\n')?; + &text[newline + 1..] + }; + let current_indent_len = TextUnit::of_str(current_indent); + + // Make sure dot is a part of call chain + let field_expr = ast::FieldExpr::cast(whitespace.syntax().parent())?; + let prev_indent = leading_indent(field_expr.syntax())?; + let target_indent = format!(" {}", prev_indent); + let target_indent_len = TextUnit::of_str(&target_indent); + if current_indent_len == target_indent_len { + return None; + } + + Some(SingleFileChange { + label: "reindent dot".to_string(), + edit: TextEdit::replace( + TextRange::from_to(offset - current_indent_len, offset), + target_indent, + ), + cursor_position: Some( + offset + target_indent_len - current_indent_len + TextUnit::of_char('.'), + ), + }) +} + +/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... 
}` +fn on_arrow_typed(file: &SourceFile, offset: TextUnit) -> Option { + let file_text = file.syntax().text(); + assert_eq!(file_text.char_at(offset), Some('>')); + let after_arrow = offset + TextUnit::of_char('>'); + if file_text.char_at(after_arrow) != Some('{') { + return None; + } + if find_node_at_offset::(file.syntax(), offset).is_none() { + return None; + } + + Some(SingleFileChange { + label: "add space after return type".to_string(), + edit: TextEdit::insert(after_arrow, " ".to_string()), + cursor_position: Some(after_arrow), + }) +} + +#[cfg(test)] +mod tests { + use test_utils::{add_cursor, assert_eq_text, extract_offset}; + + use crate::mock_analysis::single_file; + + use super::*; + + #[test] + fn test_on_enter() { + fn apply_on_enter(before: &str) -> Option { + let (offset, before) = extract_offset(before); + let (analysis, file_id) = single_file(&before); + let result = analysis.on_enter(FilePosition { offset, file_id }).unwrap()?; + + assert_eq!(result.source_file_edits.len(), 1); + let actual = result.source_file_edits[0].edit.apply(&before); + let actual = add_cursor(&actual, result.cursor_position.unwrap().offset); + Some(actual) + } + + fn do_check(before: &str, after: &str) { + let actual = apply_on_enter(before).unwrap(); + assert_eq_text!(after, &actual); + } + + fn do_check_noop(text: &str) { + assert!(apply_on_enter(text).is_none()) + } + + do_check( + r" +/// Some docs<|> +fn foo() { +} +", + r" +/// Some docs +/// <|> +fn foo() { +} +", + ); + do_check( + r" +impl S { + /// Some<|> docs. + fn foo() {} +} +", + r" +impl S { + /// Some + /// <|> docs. + fn foo() {} +} +", + ); + do_check( + r" +fn main() { + // Fix<|> me + let x = 1 + 1; +} +", + r" +fn main() { + // Fix + // <|> me + let x = 1 + 1; +} +", + ); + do_check_noop( + r" +fn main() { + // Fix me<|> + let x = 1 + 1; +} +", + ); + + do_check_noop(r"<|>//! 
docz"); + } + + fn do_type_char(char_typed: char, before: &str) -> Option<(String, SingleFileChange)> { + let (offset, before) = extract_offset(before); + let edit = TextEdit::insert(offset, char_typed.to_string()); + let before = edit.apply(&before); + let parse = SourceFile::parse(&before); + on_char_typed_inner(&parse.tree(), offset, char_typed) + .map(|it| (it.edit.apply(&before), it)) + } + + fn type_char(char_typed: char, before: &str, after: &str) { + let (actual, file_change) = do_type_char(char_typed, before) + .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed)); + + if after.contains("<|>") { + let (offset, after) = extract_offset(after); + assert_eq_text!(&after, &actual); + assert_eq!(file_change.cursor_position, Some(offset)) + } else { + assert_eq_text!(after, &actual); + } + } + + fn type_char_noop(char_typed: char, before: &str) { + let file_change = do_type_char(char_typed, before); + assert!(file_change.is_none()) + } + + #[test] + fn test_on_eq_typed() { + // do_check(r" + // fn foo() { + // let foo =<|> + // } + // ", r" + // fn foo() { + // let foo =; + // } + // "); + type_char( + '=', + r" +fn foo() { + let foo <|> 1 + 1 +} +", + r" +fn foo() { + let foo = 1 + 1; +} +", + ); + // do_check(r" + // fn foo() { + // let foo =<|> + // let bar = 1; + // } + // ", r" + // fn foo() { + // let foo =; + // let bar = 1; + // } + // "); + } + + #[test] + fn indents_new_chain_call() { + type_char( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + <|> + } + ", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + . + } + ", + ); + type_char_noop( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + <|> + } + ", + ) + } + + #[test] + fn indents_new_chain_call_with_semi() { + type_char( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + <|>; + } + ", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .; + } + ", + ); + type_char_noop( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + <|>; + } + ", + ) + } + + #[test] + fn indents_continued_chain_call() { + type_char( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + <|> + } + ", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + . + } + ", + ); + type_char_noop( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + <|> + } + ", + ); + } + + #[test] + fn indents_middle_of_chain_call() { + type_char( + '.', + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + <|> + .nth(92) + .unwrap(); + } + ", + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + . 
+ .nth(92) + .unwrap(); + } + ", + ); + type_char_noop( + '.', + r" + fn source_impl() { + let var = enum_defvariant_list().unwrap() + <|> + .nth(92) + .unwrap(); + } + ", + ); + } + + #[test] + fn dont_indent_freestanding_dot() { + type_char_noop( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + <|> + } + ", + ); + type_char_noop( + '.', + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + <|> + } + ", + ); + } + + #[test] + fn adds_space_after_return_type() { + type_char('>', "fn foo() -<|>{ 92 }", "fn foo() -><|> { 92 }") + } +} diff --git a/crates/ra_ide/src/wasm_shims.rs b/crates/ra_ide/src/wasm_shims.rs new file mode 100644 index 000000000..088cc9be4 --- /dev/null +++ b/crates/ra_ide/src/wasm_shims.rs @@ -0,0 +1,19 @@ +//! FIXME: write short doc here + +#[cfg(not(feature = "wasm"))] +pub use std::time::Instant; + +#[cfg(feature = "wasm")] +#[derive(Clone, Copy, Debug)] +pub struct Instant; + +#[cfg(feature = "wasm")] +impl Instant { + pub fn now() -> Self { + Self + } + + pub fn elapsed(&self) -> std::time::Duration { + std::time::Duration::new(0, 0) + } +} -- cgit v1.2.3 From 27b362b05910c81fd5b28f6cd5d2c075311032f9 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Wed, 27 Nov 2019 21:44:38 +0300 Subject: Reformat --- crates/ra_ide/src/syntax_highlighting.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) (limited to 'crates/ra_ide/src') diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index 2c568a747..9a3e4c82f 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs @@ -332,8 +332,7 @@ fn bar() { "# .trim(), ); - let dst_file = - project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html"); + let dst_file = project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html"); let actual_html = &analysis.highlight_as_html(file_id, true).unwrap(); let expected_html = &read_text(&dst_file); std::fs::write(dst_file, &actual_html).unwrap(); -- cgit v1.2.3 From 1d14fd17372b42c3343daf6adc9a520fdf5e9810 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Wed, 27 Nov 2019 23:22:20 +0300 Subject: Use Name::missing consistently --- crates/ra_ide/src/completion/presentation.rs | 5 +---- crates/ra_ide/src/display/function_signature.rs | 7 ++----- 2 files changed, 3 insertions(+), 9 deletions(-) (limited to 'crates/ra_ide/src') diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs index 5f056730a..97475fc0b 100644 --- a/crates/ra_ide/src/completion/presentation.rs +++ b/crates/ra_ide/src/completion/presentation.rs @@ -267,10 +267,7 @@ impl Completions { pub(crate) fn add_enum_variant(&mut self, ctx: &CompletionContext, variant: hir::EnumVariant) { let is_deprecated = is_deprecated(variant, ctx.db); - let name = match variant.name(ctx.db) { - Some(it) => it, - None => return, - }; + let name = variant.name(ctx.db); let detail_types = variant.fields(ctx.db).into_iter().map(|field| field.ty(ctx.db)); let detail = join(detail_types.map(|t| t.display(ctx.db).to_string())) .separator(", ") diff --git a/crates/ra_ide/src/display/function_signature.rs b/crates/ra_ide/src/display/function_signature.rs index d96de4e4c..324ad9552 100644 --- a/crates/ra_ide/src/display/function_signature.rs +++ b/crates/ra_ide/src/display/function_signature.rs @@ -93,12 +93,9 @@ impl FunctionSignature { _ => (), }; - let parent_name = match variant.parent_enum(db).name(db) { - 
Some(name) => name.to_string(), - None => "missing".into(), - }; + let parent_name = variant.parent_enum(db).name(db).to_string(); - let name = format!("{}::{}", parent_name, variant.name(db).unwrap()); + let name = format!("{}::{}", parent_name, variant.name(db)); let params = variant .fields(db) -- cgit v1.2.3