From 1967884d6836219ee78a754ca5c66ac781351559 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 8 Jan 2019 22:17:36 +0300 Subject: rename ra_editor -> ra_ide_api_light --- crates/ra_editor/Cargo.toml | 19 - crates/ra_editor/src/assists.rs | 209 ------ crates/ra_editor/src/assists/add_derive.rs | 84 --- crates/ra_editor/src/assists/add_impl.rs | 66 -- crates/ra_editor/src/assists/change_visibility.rs | 116 --- crates/ra_editor/src/assists/flip_comma.rs | 31 - crates/ra_editor/src/assists/introduce_variable.rs | 144 ---- .../src/assists/replace_if_let_with_match.rs | 92 --- crates/ra_editor/src/assists/split_import.rs | 56 -- crates/ra_editor/src/diagnostics.rs | 266 ------- crates/ra_editor/src/extend_selection.rs | 281 ------- crates/ra_editor/src/folding_ranges.rs | 297 -------- crates/ra_editor/src/lib.rs | 168 ----- crates/ra_editor/src/line_index.rs | 399 ---------- crates/ra_editor/src/line_index_utils.rs | 363 --------- crates/ra_editor/src/structure.rs | 129 ---- crates/ra_editor/src/test_utils.rs | 41 - crates/ra_editor/src/typing.rs | 826 --------------------- crates/ra_ide_api_light/Cargo.toml | 19 + crates/ra_ide_api_light/src/assists.rs | 209 ++++++ crates/ra_ide_api_light/src/assists/add_derive.rs | 84 +++ crates/ra_ide_api_light/src/assists/add_impl.rs | 66 ++ .../src/assists/change_visibility.rs | 116 +++ crates/ra_ide_api_light/src/assists/flip_comma.rs | 31 + .../src/assists/introduce_variable.rs | 144 ++++ .../src/assists/replace_if_let_with_match.rs | 92 +++ .../ra_ide_api_light/src/assists/split_import.rs | 56 ++ crates/ra_ide_api_light/src/diagnostics.rs | 266 +++++++ crates/ra_ide_api_light/src/extend_selection.rs | 281 +++++++ crates/ra_ide_api_light/src/folding_ranges.rs | 297 ++++++++ crates/ra_ide_api_light/src/lib.rs | 168 +++++ crates/ra_ide_api_light/src/line_index.rs | 399 ++++++++++ crates/ra_ide_api_light/src/line_index_utils.rs | 363 +++++++++ crates/ra_ide_api_light/src/structure.rs | 129 ++++ 
crates/ra_ide_api_light/src/test_utils.rs | 41 + crates/ra_ide_api_light/src/typing.rs | 826 +++++++++++++++++++++ 36 files changed, 3587 insertions(+), 3587 deletions(-) delete mode 100644 crates/ra_editor/Cargo.toml delete mode 100644 crates/ra_editor/src/assists.rs delete mode 100644 crates/ra_editor/src/assists/add_derive.rs delete mode 100644 crates/ra_editor/src/assists/add_impl.rs delete mode 100644 crates/ra_editor/src/assists/change_visibility.rs delete mode 100644 crates/ra_editor/src/assists/flip_comma.rs delete mode 100644 crates/ra_editor/src/assists/introduce_variable.rs delete mode 100644 crates/ra_editor/src/assists/replace_if_let_with_match.rs delete mode 100644 crates/ra_editor/src/assists/split_import.rs delete mode 100644 crates/ra_editor/src/diagnostics.rs delete mode 100644 crates/ra_editor/src/extend_selection.rs delete mode 100644 crates/ra_editor/src/folding_ranges.rs delete mode 100644 crates/ra_editor/src/lib.rs delete mode 100644 crates/ra_editor/src/line_index.rs delete mode 100644 crates/ra_editor/src/line_index_utils.rs delete mode 100644 crates/ra_editor/src/structure.rs delete mode 100644 crates/ra_editor/src/test_utils.rs delete mode 100644 crates/ra_editor/src/typing.rs create mode 100644 crates/ra_ide_api_light/Cargo.toml create mode 100644 crates/ra_ide_api_light/src/assists.rs create mode 100644 crates/ra_ide_api_light/src/assists/add_derive.rs create mode 100644 crates/ra_ide_api_light/src/assists/add_impl.rs create mode 100644 crates/ra_ide_api_light/src/assists/change_visibility.rs create mode 100644 crates/ra_ide_api_light/src/assists/flip_comma.rs create mode 100644 crates/ra_ide_api_light/src/assists/introduce_variable.rs create mode 100644 crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs create mode 100644 crates/ra_ide_api_light/src/assists/split_import.rs create mode 100644 crates/ra_ide_api_light/src/diagnostics.rs create mode 100644 crates/ra_ide_api_light/src/extend_selection.rs create mode 100644 
crates/ra_ide_api_light/src/folding_ranges.rs create mode 100644 crates/ra_ide_api_light/src/lib.rs create mode 100644 crates/ra_ide_api_light/src/line_index.rs create mode 100644 crates/ra_ide_api_light/src/line_index_utils.rs create mode 100644 crates/ra_ide_api_light/src/structure.rs create mode 100644 crates/ra_ide_api_light/src/test_utils.rs create mode 100644 crates/ra_ide_api_light/src/typing.rs (limited to 'crates') diff --git a/crates/ra_editor/Cargo.toml b/crates/ra_editor/Cargo.toml deleted file mode 100644 index a97d2308f..000000000 --- a/crates/ra_editor/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -edition = "2018" -name = "ra_editor" -version = "0.1.0" -authors = ["Aleksey Kladov "] -publish = false - -[dependencies] -itertools = "0.8.0" -superslice = "0.1.0" -join_to_string = "0.1.1" -rustc-hash = "1.0" - -ra_syntax = { path = "../ra_syntax" } -ra_text_edit = { path = "../ra_text_edit" } - -[dev-dependencies] -test_utils = { path = "../test_utils" } -proptest = "0.8.7" diff --git a/crates/ra_editor/src/assists.rs b/crates/ra_editor/src/assists.rs deleted file mode 100644 index 83eabfc85..000000000 --- a/crates/ra_editor/src/assists.rs +++ /dev/null @@ -1,209 +0,0 @@ -//! This modules contains various "assits": suggestions for source code edits -//! which are likely to occur at a given cursor positon. For example, if the -//! cursor is on the `,`, a possible assist is swapping the elments around the -//! comma. 
- -mod flip_comma; -mod add_derive; -mod add_impl; -mod introduce_variable; -mod change_visibility; -mod split_import; -mod replace_if_let_with_match; - -use ra_text_edit::{TextEdit, TextEditBuilder}; -use ra_syntax::{ - Direction, SyntaxNode, TextUnit, TextRange, SourceFile, AstNode, - algo::{find_leaf_at_offset, find_node_at_offset, find_covering_node, LeafAtOffset}, - ast::{self, AstToken}, -}; -use itertools::Itertools; - -pub use self::{ - flip_comma::flip_comma, - add_derive::add_derive, - add_impl::add_impl, - introduce_variable::introduce_variable, - change_visibility::change_visibility, - split_import::split_import, - replace_if_let_with_match::replace_if_let_with_match, -}; - -/// Return all the assists applicable at the given position. -pub fn assists(file: &SourceFile, range: TextRange) -> Vec { - let ctx = AssistCtx::new(file, range); - [ - flip_comma, - add_derive, - add_impl, - introduce_variable, - change_visibility, - split_import, - replace_if_let_with_match, - ] - .iter() - .filter_map(|&assist| ctx.clone().apply(assist)) - .collect() -} - -#[derive(Debug)] -pub struct LocalEdit { - pub label: String, - pub edit: TextEdit, - pub cursor_position: Option, -} - -fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> { - node.siblings(direction) - .skip(1) - .find(|node| !node.kind().is_trivia()) -} - -/// `AssistCtx` allows to apply an assist or check if it could be applied. -/// -/// Assists use a somewhat overengeneered approach, given the current needs. The -/// assists workflow consists of two phases. In the first phase, a user asks for -/// the list of available assists. In the second phase, the user picks a -/// particular assist and it gets applied. -/// -/// There are two peculiarities here: -/// -/// * first, we ideally avoid computing more things then neccessary to answer -/// "is assist applicable" in the first phase. 
-/// * second, when we are appling assist, we don't have a gurantee that there -/// weren't any changes between the point when user asked for assists and when -/// they applied a particular assist. So, when applying assist, we need to do -/// all the checks from scratch. -/// -/// To avoid repeating the same code twice for both "check" and "apply" -/// functions, we use an approach remeniscent of that of Django's function based -/// views dealing with forms. Each assist receives a runtime parameter, -/// `should_compute_edit`. It first check if an edit is applicable (potentially -/// computing info required to compute the actual edit). If it is applicable, -/// and `should_compute_edit` is `true`, it then computes the actual edit. -/// -/// So, to implement the original assists workflow, we can first apply each edit -/// with `should_compute_edit = false`, and then applying the selected edit -/// again, with `should_compute_edit = true` this time. -/// -/// Note, however, that we don't actually use such two-phase logic at the -/// moment, because the LSP API is pretty awkward in this place, and it's much -/// easier to just compute the edit eagarly :-) -#[derive(Debug, Clone)] -pub struct AssistCtx<'a> { - source_file: &'a SourceFile, - range: TextRange, - should_compute_edit: bool, -} - -#[derive(Debug)] -pub enum Assist { - Applicable, - Edit(LocalEdit), -} - -#[derive(Default)] -struct AssistBuilder { - edit: TextEditBuilder, - cursor_position: Option, -} - -impl<'a> AssistCtx<'a> { - pub fn new(source_file: &'a SourceFile, range: TextRange) -> AssistCtx { - AssistCtx { - source_file, - range, - should_compute_edit: false, - } - } - - pub fn apply(mut self, assist: fn(AssistCtx) -> Option) -> Option { - self.should_compute_edit = true; - match assist(self) { - None => None, - Some(Assist::Edit(e)) => Some(e), - Some(Assist::Applicable) => unreachable!(), - } - } - - pub fn check(mut self, assist: fn(AssistCtx) -> Option) -> bool { - self.should_compute_edit = 
false; - match assist(self) { - None => false, - Some(Assist::Edit(_)) => unreachable!(), - Some(Assist::Applicable) => true, - } - } - - fn build(self, label: impl Into, f: impl FnOnce(&mut AssistBuilder)) -> Option { - if !self.should_compute_edit { - return Some(Assist::Applicable); - } - let mut edit = AssistBuilder::default(); - f(&mut edit); - Some(Assist::Edit(LocalEdit { - label: label.into(), - edit: edit.edit.finish(), - cursor_position: edit.cursor_position, - })) - } - - pub(crate) fn leaf_at_offset(&self) -> LeafAtOffset<&'a SyntaxNode> { - find_leaf_at_offset(self.source_file.syntax(), self.range.start()) - } - pub(crate) fn node_at_offset(&self) -> Option<&'a N> { - find_node_at_offset(self.source_file.syntax(), self.range.start()) - } - pub(crate) fn covering_node(&self) -> &'a SyntaxNode { - find_covering_node(self.source_file.syntax(), self.range) - } -} - -impl AssistBuilder { - fn replace(&mut self, range: TextRange, replace_with: impl Into) { - self.edit.replace(range, replace_with.into()) - } - fn replace_node_and_indent(&mut self, node: &SyntaxNode, replace_with: impl Into) { - let mut replace_with = replace_with.into(); - if let Some(indent) = calc_indent(node) { - replace_with = reindent(&replace_with, indent) - } - self.replace(node.range(), replace_with) - } - #[allow(unused)] - fn delete(&mut self, range: TextRange) { - self.edit.delete(range) - } - fn insert(&mut self, offset: TextUnit, text: impl Into) { - self.edit.insert(offset, text.into()) - } - fn set_cursor(&mut self, offset: TextUnit) { - self.cursor_position = Some(offset) - } -} - -fn calc_indent(node: &SyntaxNode) -> Option<&str> { - let prev = node.prev_sibling()?; - let ws_text = ast::Whitespace::cast(prev)?.text(); - ws_text.rfind('\n').map(|pos| &ws_text[pos + 1..]) -} - -fn reindent(text: &str, indent: &str) -> String { - let indent = format!("\n{}", indent); - text.lines().intersperse(&indent).collect() -} - -#[cfg(test)] -fn check_assist(assist: fn(AssistCtx) -> 
Option, before: &str, after: &str) { - crate::test_utils::check_action(before, after, |file, off| { - let range = TextRange::offset_len(off, 0.into()); - AssistCtx::new(file, range).apply(assist) - }) -} - -#[cfg(test)] -fn check_assist_range(assist: fn(AssistCtx) -> Option, before: &str, after: &str) { - crate::test_utils::check_action_range(before, after, |file, range| { - AssistCtx::new(file, range).apply(assist) - }) -} diff --git a/crates/ra_editor/src/assists/add_derive.rs b/crates/ra_editor/src/assists/add_derive.rs deleted file mode 100644 index 6e964d011..000000000 --- a/crates/ra_editor/src/assists/add_derive.rs +++ /dev/null @@ -1,84 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode, AttrsOwner}, - SyntaxKind::{WHITESPACE, COMMENT}, - TextUnit, -}; - -use crate::assists::{AssistCtx, Assist}; - -pub fn add_derive(ctx: AssistCtx) -> Option { - let nominal = ctx.node_at_offset::()?; - let node_start = derive_insertion_offset(nominal)?; - ctx.build("add `#[derive]`", |edit| { - let derive_attr = nominal - .attrs() - .filter_map(|x| x.as_call()) - .filter(|(name, _arg)| name == "derive") - .map(|(_name, arg)| arg) - .next(); - let offset = match derive_attr { - None => { - edit.insert(node_start, "#[derive()]\n"); - node_start + TextUnit::of_str("#[derive(") - } - Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'), - }; - edit.set_cursor(offset) - }) -} - -// Insert `derive` after doc comments. 
-fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option { - let non_ws_child = nominal - .syntax() - .children() - .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; - Some(non_ws_child.range().start()) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist; - - #[test] - fn add_derive_new() { - check_assist( - add_derive, - "struct Foo { a: i32, <|>}", - "#[derive(<|>)]\nstruct Foo { a: i32, }", - ); - check_assist( - add_derive, - "struct Foo { <|> a: i32, }", - "#[derive(<|>)]\nstruct Foo { a: i32, }", - ); - } - - #[test] - fn add_derive_existing() { - check_assist( - add_derive, - "#[derive(Clone)]\nstruct Foo { a: i32<|>, }", - "#[derive(Clone<|>)]\nstruct Foo { a: i32, }", - ); - } - - #[test] - fn add_derive_new_with_doc_comment() { - check_assist( - add_derive, - " -/// `Foo` is a pretty important struct. -/// It does stuff. -struct Foo { a: i32<|>, } - ", - " -/// `Foo` is a pretty important struct. -/// It does stuff. -#[derive(<|>)] -struct Foo { a: i32, } - ", - ); - } -} diff --git a/crates/ra_editor/src/assists/add_impl.rs b/crates/ra_editor/src/assists/add_impl.rs deleted file mode 100644 index 2eda7cae2..000000000 --- a/crates/ra_editor/src/assists/add_impl.rs +++ /dev/null @@ -1,66 +0,0 @@ -use join_to_string::join; -use ra_syntax::{ - ast::{self, AstNode, AstToken, NameOwner, TypeParamsOwner}, - TextUnit, -}; - -use crate::assists::{AssistCtx, Assist}; - -pub fn add_impl(ctx: AssistCtx) -> Option { - let nominal = ctx.node_at_offset::()?; - let name = nominal.name()?; - ctx.build("add impl", |edit| { - let type_params = nominal.type_param_list(); - let start_offset = nominal.syntax().range().end(); - let mut buf = String::new(); - buf.push_str("\n\nimpl"); - if let Some(type_params) = type_params { - type_params.syntax().text().push_to(&mut buf); - } - buf.push_str(" "); - buf.push_str(name.text().as_str()); - if let Some(type_params) = type_params { - let lifetime_params = type_params - 
.lifetime_params() - .filter_map(|it| it.lifetime()) - .map(|it| it.text()); - let type_params = type_params - .type_params() - .filter_map(|it| it.name()) - .map(|it| it.text()); - join(lifetime_params.chain(type_params)) - .surround_with("<", ">") - .to_buf(&mut buf); - } - buf.push_str(" {\n"); - edit.set_cursor(start_offset + TextUnit::of_str(&buf)); - buf.push_str("\n}"); - edit.insert(start_offset, buf); - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist; - - #[test] - fn test_add_impl() { - check_assist( - add_impl, - "struct Foo {<|>}\n", - "struct Foo {}\n\nimpl Foo {\n<|>\n}\n", - ); - check_assist( - add_impl, - "struct Foo {<|>}", - "struct Foo {}\n\nimpl Foo {\n<|>\n}", - ); - check_assist( - add_impl, - "struct Foo<'a, T: Foo<'a>> {<|>}", - "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n<|>\n}", - ); - } - -} diff --git a/crates/ra_editor/src/assists/change_visibility.rs b/crates/ra_editor/src/assists/change_visibility.rs deleted file mode 100644 index 89729e2c2..000000000 --- a/crates/ra_editor/src/assists/change_visibility.rs +++ /dev/null @@ -1,116 +0,0 @@ -use ra_syntax::{ - AstNode, - ast::{self, VisibilityOwner, NameOwner}, - SyntaxKind::{VISIBILITY, FN_KW, MOD_KW, STRUCT_KW, ENUM_KW, TRAIT_KW, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT}, -}; - -use crate::assists::{AssistCtx, Assist}; - -pub fn change_visibility(ctx: AssistCtx) -> Option { - if let Some(vis) = ctx.node_at_offset::() { - return change_vis(ctx, vis); - } - add_vis(ctx) -} - -fn add_vis(ctx: AssistCtx) -> Option { - let item_keyword = ctx.leaf_at_offset().find(|leaf| match leaf.kind() { - FN_KW | MOD_KW | STRUCT_KW | ENUM_KW | TRAIT_KW => true, - _ => false, - }); - - let offset = if let Some(keyword) = item_keyword { - let parent = keyword.parent()?; - let def_kws = vec![FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF]; - // Parent is not a definition, can't add visibility - if !def_kws.iter().any(|&def_kw| 
def_kw == parent.kind()) { - return None; - } - // Already have visibility, do nothing - if parent.children().any(|child| child.kind() == VISIBILITY) { - return None; - } - parent.range().start() - } else { - let ident = ctx.leaf_at_offset().find(|leaf| leaf.kind() == IDENT)?; - let field = ident.ancestors().find_map(ast::NamedFieldDef::cast)?; - if field.name()?.syntax().range() != ident.range() && field.visibility().is_some() { - return None; - } - field.syntax().range().start() - }; - - ctx.build("make pub(crate)", |edit| { - edit.insert(offset, "pub(crate) "); - edit.set_cursor(offset); - }) -} - -fn change_vis(ctx: AssistCtx, vis: &ast::Visibility) -> Option { - if vis.syntax().text() != "pub" { - return None; - } - ctx.build("chage to pub(crate)", |edit| { - edit.replace(vis.syntax().range(), "pub(crate)"); - edit.set_cursor(vis.syntax().range().start()); - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist; - - #[test] - fn change_visibility_adds_pub_crate_to_items() { - check_assist( - change_visibility, - "<|>fn foo() {}", - "<|>pub(crate) fn foo() {}", - ); - check_assist( - change_visibility, - "f<|>n foo() {}", - "<|>pub(crate) fn foo() {}", - ); - check_assist( - change_visibility, - "<|>struct Foo {}", - "<|>pub(crate) struct Foo {}", - ); - check_assist( - change_visibility, - "<|>mod foo {}", - "<|>pub(crate) mod foo {}", - ); - check_assist( - change_visibility, - "<|>trait Foo {}", - "<|>pub(crate) trait Foo {}", - ); - check_assist(change_visibility, "m<|>od {}", "<|>pub(crate) mod {}"); - check_assist( - change_visibility, - "unsafe f<|>n foo() {}", - "<|>pub(crate) unsafe fn foo() {}", - ); - } - - #[test] - fn change_visibility_works_with_struct_fields() { - check_assist( - change_visibility, - "struct S { <|>field: u32 }", - "struct S { <|>pub(crate) field: u32 }", - ) - } - - #[test] - fn change_visibility_pub_to_pub_crate() { - check_assist( - change_visibility, - "<|>pub fn foo() {}", - "<|>pub(crate) fn 
foo() {}", - ) - } -} diff --git a/crates/ra_editor/src/assists/flip_comma.rs b/crates/ra_editor/src/assists/flip_comma.rs deleted file mode 100644 index a343413cc..000000000 --- a/crates/ra_editor/src/assists/flip_comma.rs +++ /dev/null @@ -1,31 +0,0 @@ -use ra_syntax::{ - Direction, - SyntaxKind::COMMA, -}; - -use crate::assists::{non_trivia_sibling, AssistCtx, Assist}; - -pub fn flip_comma(ctx: AssistCtx) -> Option { - let comma = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COMMA)?; - let prev = non_trivia_sibling(comma, Direction::Prev)?; - let next = non_trivia_sibling(comma, Direction::Next)?; - ctx.build("flip comma", |edit| { - edit.replace(prev.range(), next.text()); - edit.replace(next.range(), prev.text()); - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist; - - #[test] - fn flip_comma_works_for_function_parameters() { - check_assist( - flip_comma, - "fn foo(x: i32,<|> y: Result<(), ()>) {}", - "fn foo(y: Result<(), ()>,<|> x: i32) {}", - ) - } -} diff --git a/crates/ra_editor/src/assists/introduce_variable.rs b/crates/ra_editor/src/assists/introduce_variable.rs deleted file mode 100644 index 523ec7034..000000000 --- a/crates/ra_editor/src/assists/introduce_variable.rs +++ /dev/null @@ -1,144 +0,0 @@ -use ra_syntax::{ - ast::{self, AstNode}, - SyntaxKind::WHITESPACE, - SyntaxNode, TextUnit, -}; - -use crate::assists::{AssistCtx, Assist}; - -pub fn introduce_variable<'a>(ctx: AssistCtx) -> Option { - let node = ctx.covering_node(); - let expr = node.ancestors().filter_map(ast::Expr::cast).next()?; - - let anchor_stmt = anchor_stmt(expr)?; - let indent = anchor_stmt.prev_sibling()?; - if indent.kind() != WHITESPACE { - return None; - } - ctx.build("introduce variable", move |edit| { - let mut buf = String::new(); - - buf.push_str("let var_name = "); - expr.syntax().text().push_to(&mut buf); - let is_full_stmt = if let Some(expr_stmt) = ast::ExprStmt::cast(anchor_stmt) { - Some(expr.syntax()) == 
expr_stmt.expr().map(|e| e.syntax()) - } else { - false - }; - if is_full_stmt { - edit.replace(expr.syntax().range(), buf); - } else { - buf.push_str(";"); - indent.text().push_to(&mut buf); - edit.replace(expr.syntax().range(), "var_name".to_string()); - edit.insert(anchor_stmt.range().start(), buf); - } - edit.set_cursor(anchor_stmt.range().start() + TextUnit::of_str("let ")); - }) -} - -/// Statement or last in the block expression, which will follow -/// the freshly introduced var. -fn anchor_stmt(expr: &ast::Expr) -> Option<&SyntaxNode> { - expr.syntax().ancestors().find(|&node| { - if ast::Stmt::cast(node).is_some() { - return true; - } - if let Some(expr) = node - .parent() - .and_then(ast::Block::cast) - .and_then(|it| it.expr()) - { - if expr.syntax() == node { - return true; - } - } - false - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist_range; - - #[test] - fn test_introduce_var_simple() { - check_assist_range( - introduce_variable, - " -fn foo() { - foo(<|>1 + 1<|>); -}", - " -fn foo() { - let <|>var_name = 1 + 1; - foo(var_name); -}", - ); - } - - #[test] - fn test_introduce_var_expr_stmt() { - check_assist_range( - introduce_variable, - " -fn foo() { - <|>1 + 1<|>; -}", - " -fn foo() { - let <|>var_name = 1 + 1; -}", - ); - } - - #[test] - fn test_introduce_var_part_of_expr_stmt() { - check_assist_range( - introduce_variable, - " -fn foo() { - <|>1<|> + 1; -}", - " -fn foo() { - let <|>var_name = 1; - var_name + 1; -}", - ); - } - - #[test] - fn test_introduce_var_last_expr() { - check_assist_range( - introduce_variable, - " -fn foo() { - bar(<|>1 + 1<|>) -}", - " -fn foo() { - let <|>var_name = 1 + 1; - bar(var_name) -}", - ); - } - - #[test] - fn test_introduce_var_last_full_expr() { - check_assist_range( - introduce_variable, - " -fn foo() { - <|>bar(1 + 1)<|> -}", - " -fn foo() { - let <|>var_name = bar(1 + 1); - var_name -}", - ); - } - -} diff --git 
a/crates/ra_editor/src/assists/replace_if_let_with_match.rs b/crates/ra_editor/src/assists/replace_if_let_with_match.rs deleted file mode 100644 index 30c371480..000000000 --- a/crates/ra_editor/src/assists/replace_if_let_with_match.rs +++ /dev/null @@ -1,92 +0,0 @@ -use ra_syntax::{ - AstNode, SyntaxKind::{L_CURLY, R_CURLY, WHITESPACE}, - ast, -}; - -use crate::assists::{AssistCtx, Assist}; - -pub fn replace_if_let_with_match(ctx: AssistCtx) -> Option { - let if_expr: &ast::IfExpr = ctx.node_at_offset()?; - let cond = if_expr.condition()?; - let pat = cond.pat()?; - let expr = cond.expr()?; - let then_block = if_expr.then_branch()?; - let else_block = if_expr.else_branch()?; - - ctx.build("replace with match", |edit| { - let match_expr = build_match_expr(expr, pat, then_block, else_block); - edit.replace_node_and_indent(if_expr.syntax(), match_expr); - edit.set_cursor(if_expr.syntax().range().start()) - }) -} - -fn build_match_expr( - expr: &ast::Expr, - pat1: &ast::Pat, - arm1: &ast::Block, - arm2: &ast::Block, -) -> String { - let mut buf = String::new(); - buf.push_str(&format!("match {} {{\n", expr.syntax().text())); - buf.push_str(&format!( - " {} => {}\n", - pat1.syntax().text(), - format_arm(arm1) - )); - buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); - buf.push_str("}"); - buf -} - -fn format_arm(block: &ast::Block) -> String { - match extract_expression(block) { - None => block.syntax().text().to_string(), - Some(e) => format!("{},", e.syntax().text()), - } -} - -fn extract_expression(block: &ast::Block) -> Option<&ast::Expr> { - let expr = block.expr()?; - let non_trivial_children = block.syntax().children().filter(|it| { - !(it == &expr.syntax() - || it.kind() == L_CURLY - || it.kind() == R_CURLY - || it.kind() == WHITESPACE) - }); - if non_trivial_children.count() > 0 { - return None; - } - Some(expr) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist; - - #[test] - fn 
test_replace_if_let_with_match_unwraps_simple_expressions() { - check_assist( - replace_if_let_with_match, - " -impl VariantData { - pub fn is_struct(&self) -> bool { - if <|>let VariantData::Struct(..) = *self { - true - } else { - false - } - } -} ", - " -impl VariantData { - pub fn is_struct(&self) -> bool { - <|>match *self { - VariantData::Struct(..) => true, - _ => false, - } - } -} ", - ) - } -} diff --git a/crates/ra_editor/src/assists/split_import.rs b/crates/ra_editor/src/assists/split_import.rs deleted file mode 100644 index e4015f07d..000000000 --- a/crates/ra_editor/src/assists/split_import.rs +++ /dev/null @@ -1,56 +0,0 @@ -use ra_syntax::{ - TextUnit, AstNode, SyntaxKind::COLONCOLON, - ast, - algo::generate, -}; - -use crate::assists::{AssistCtx, Assist}; - -pub fn split_import(ctx: AssistCtx) -> Option { - let colon_colon = ctx - .leaf_at_offset() - .find(|leaf| leaf.kind() == COLONCOLON)?; - let path = colon_colon.parent().and_then(ast::Path::cast)?; - let top_path = generate(Some(path), |it| it.parent_path()).last()?; - - let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast); - if use_tree.is_none() { - return None; - } - - let l_curly = colon_colon.range().end(); - let r_curly = match top_path.syntax().parent().and_then(ast::UseTree::cast) { - Some(tree) => tree.syntax().range().end(), - None => top_path.syntax().range().end(), - }; - - ctx.build("split import", |edit| { - edit.insert(l_curly, "{"); - edit.insert(r_curly, "}"); - edit.set_cursor(l_curly + TextUnit::of_str("{")); - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::assists::check_assist; - - #[test] - fn test_split_import() { - check_assist( - split_import, - "use crate::<|>db::RootDatabase;", - "use crate::{<|>db::RootDatabase};", - ) - } - - #[test] - fn split_import_works_with_trees() { - check_assist( - split_import, - "use algo:<|>:visitor::{Visitor, visit}", - "use algo::{<|>visitor::{Visitor, visit}}", - ) - } -} diff --git 
a/crates/ra_editor/src/diagnostics.rs b/crates/ra_editor/src/diagnostics.rs deleted file mode 100644 index 2b695dfdf..000000000 --- a/crates/ra_editor/src/diagnostics.rs +++ /dev/null @@ -1,266 +0,0 @@ -use itertools::Itertools; - -use ra_syntax::{ - Location, SourceFile, SyntaxKind, TextRange, SyntaxNode, - ast::{self, AstNode}, - -}; -use ra_text_edit::{TextEdit, TextEditBuilder}; - -use crate::{Diagnostic, LocalEdit, Severity}; - -pub fn diagnostics(file: &SourceFile) -> Vec { - fn location_to_range(location: Location) -> TextRange { - match location { - Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), - Location::Range(range) => range, - } - } - - let mut errors: Vec = file - .errors() - .into_iter() - .map(|err| Diagnostic { - range: location_to_range(err.location()), - msg: format!("Syntax Error: {}", err), - severity: Severity::Error, - fix: None, - }) - .collect(); - - for node in file.syntax().descendants() { - check_unnecessary_braces_in_use_statement(&mut errors, node); - check_struct_shorthand_initialization(&mut errors, node); - } - - errors -} - -fn check_unnecessary_braces_in_use_statement( - acc: &mut Vec, - node: &SyntaxNode, -) -> Option<()> { - let use_tree_list = ast::UseTreeList::cast(node)?; - if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { - let range = use_tree_list.syntax().range(); - let edit = - text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree) - .unwrap_or_else(|| { - let to_replace = single_use_tree.syntax().text().to_string(); - let mut edit_builder = TextEditBuilder::default(); - edit_builder.delete(range); - edit_builder.insert(range.start(), to_replace); - edit_builder.finish() - }); - - acc.push(Diagnostic { - range, - msg: format!("Unnecessary braces in use statement"), - severity: Severity::WeakWarning, - fix: Some(LocalEdit { - label: "Remove unnecessary braces".to_string(), - edit, - cursor_position: None, - }), - }); - } - - Some(()) -} - -fn 
text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( - single_use_tree: &ast::UseTree, -) -> Option { - let use_tree_list_node = single_use_tree.syntax().parent()?; - if single_use_tree - .path()? - .segment()? - .syntax() - .first_child()? - .kind() - == SyntaxKind::SELF_KW - { - let start = use_tree_list_node.prev_sibling()?.range().start(); - let end = use_tree_list_node.range().end(); - let range = TextRange::from_to(start, end); - let mut edit_builder = TextEditBuilder::default(); - edit_builder.delete(range); - return Some(edit_builder.finish()); - } - None -} - -fn check_struct_shorthand_initialization( - acc: &mut Vec, - node: &SyntaxNode, -) -> Option<()> { - let struct_lit = ast::StructLit::cast(node)?; - let named_field_list = struct_lit.named_field_list()?; - for named_field in named_field_list.fields() { - if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) { - let field_name = name_ref.syntax().text().to_string(); - let field_expr = expr.syntax().text().to_string(); - if field_name == field_expr { - let mut edit_builder = TextEditBuilder::default(); - edit_builder.delete(named_field.syntax().range()); - edit_builder.insert(named_field.syntax().range().start(), field_name); - let edit = edit_builder.finish(); - - acc.push(Diagnostic { - range: named_field.syntax().range(), - msg: format!("Shorthand struct initialization"), - severity: Severity::WeakWarning, - fix: Some(LocalEdit { - label: "use struct shorthand initialization".to_string(), - edit, - cursor_position: None, - }), - }); - } - } - } - Some(()) -} - -#[cfg(test)] -mod tests { - use crate::test_utils::assert_eq_text; - - use super::*; - - type DiagnosticChecker = fn(&mut Vec, &SyntaxNode) -> Option<()>; - - fn check_not_applicable(code: &str, func: DiagnosticChecker) { - let file = SourceFile::parse(code); - let mut diagnostics = Vec::new(); - for node in file.syntax().descendants() { - func(&mut diagnostics, node); - } - 
assert!(diagnostics.is_empty()); - } - - fn check_apply(before: &str, after: &str, func: DiagnosticChecker) { - let file = SourceFile::parse(before); - let mut diagnostics = Vec::new(); - for node in file.syntax().descendants() { - func(&mut diagnostics, node); - } - let diagnostic = diagnostics - .pop() - .unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); - let fix = diagnostic.fix.unwrap(); - let actual = fix.edit.apply(&before); - assert_eq_text!(after, &actual); - } - - #[test] - fn test_check_unnecessary_braces_in_use_statement() { - check_not_applicable( - " - use a; - use a::{c, d::e}; - ", - check_unnecessary_braces_in_use_statement, - ); - check_apply( - "use {b};", - "use b;", - check_unnecessary_braces_in_use_statement, - ); - check_apply( - "use a::{c};", - "use a::c;", - check_unnecessary_braces_in_use_statement, - ); - check_apply( - "use a::{self};", - "use a;", - check_unnecessary_braces_in_use_statement, - ); - check_apply( - "use a::{c, d::{e}};", - "use a::{c, d::e};", - check_unnecessary_braces_in_use_statement, - ); - } - - #[test] - fn test_check_struct_shorthand_initialization() { - check_not_applicable( - r#" - struct A { - a: &'static str - } - - fn main() { - A { - a: "hello" - } - } - "#, - check_struct_shorthand_initialization, - ); - - check_apply( - r#" -struct A { - a: &'static str -} - -fn main() { - let a = "haha"; - A { - a: a - } -} - "#, - r#" -struct A { - a: &'static str -} - -fn main() { - let a = "haha"; - A { - a - } -} - "#, - check_struct_shorthand_initialization, - ); - - check_apply( - r#" -struct A { - a: &'static str, - b: &'static str -} - -fn main() { - let a = "haha"; - let b = "bb"; - A { - a: a, - b - } -} - "#, - r#" -struct A { - a: &'static str, - b: &'static str -} - -fn main() { - let a = "haha"; - let b = "bb"; - A { - a, - b - } -} - "#, - check_struct_shorthand_initialization, - ); - } -} diff --git a/crates/ra_editor/src/extend_selection.rs b/crates/ra_editor/src/extend_selection.rs deleted 
file mode 100644 index 08cae5a51..000000000 --- a/crates/ra_editor/src/extend_selection.rs +++ /dev/null @@ -1,281 +0,0 @@ -use ra_syntax::{ - Direction, SyntaxNode, TextRange, TextUnit, - algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset}, - SyntaxKind::*, -}; - -pub fn extend_selection(root: &SyntaxNode, range: TextRange) -> Option { - let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; - if range.is_empty() { - let offset = range.start(); - let mut leaves = find_leaf_at_offset(root, offset); - if leaves.clone().all(|it| it.kind() == WHITESPACE) { - return Some(extend_ws(root, leaves.next()?, offset)); - } - let leaf_range = match leaves { - LeafAtOffset::None => return None, - LeafAtOffset::Single(l) => { - if string_kinds.contains(&l.kind()) { - extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range()) - } else { - l.range() - } - } - LeafAtOffset::Between(l, r) => pick_best(l, r).range(), - }; - return Some(leaf_range); - }; - let node = find_covering_node(root, range); - if string_kinds.contains(&node.kind()) && range == node.range() { - if let Some(range) = extend_comments(node) { - return Some(range); - } - } - - match node.ancestors().skip_while(|n| n.range() == range).next() { - None => None, - Some(parent) => Some(parent.range()), - } -} - -fn extend_single_word_in_comment_or_string( - leaf: &SyntaxNode, - offset: TextUnit, -) -> Option { - let text: &str = leaf.leaf_text()?; - let cursor_position: u32 = (offset - leaf.range().start()).into(); - - let (before, after) = text.split_at(cursor_position as usize); - - fn non_word_char(c: char) -> bool { - !(c.is_alphanumeric() || c == '_') - } - - let start_idx = before.rfind(non_word_char)? 
as u32; - let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32; - - let from: TextUnit = (start_idx + 1).into(); - let to: TextUnit = (cursor_position + end_idx).into(); - - let range = TextRange::from_to(from, to); - if range.is_empty() { - None - } else { - Some(range + leaf.range().start()) - } -} - -fn extend_ws(root: &SyntaxNode, ws: &SyntaxNode, offset: TextUnit) -> TextRange { - let ws_text = ws.leaf_text().unwrap(); - let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start(); - let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start(); - let ws_suffix = &ws_text.as_str()[suffix]; - let ws_prefix = &ws_text.as_str()[prefix]; - if ws_text.contains('\n') && !ws_suffix.contains('\n') { - if let Some(node) = ws.next_sibling() { - let start = match ws_prefix.rfind('\n') { - Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32), - None => node.range().start(), - }; - let end = if root.text().char_at(node.range().end()) == Some('\n') { - node.range().end() + TextUnit::of_char('\n') - } else { - node.range().end() - }; - return TextRange::from_to(start, end); - } - } - ws.range() -} - -fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode { - return if priority(r) > priority(l) { r } else { l }; - fn priority(n: &SyntaxNode) -> usize { - match n.kind() { - WHITESPACE => 0, - IDENT | SELF_KW | SUPER_KW | CRATE_KW | LIFETIME => 2, - _ => 1, - } - } -} - -fn extend_comments(node: &SyntaxNode) -> Option { - let prev = adj_comments(node, Direction::Prev); - let next = adj_comments(node, Direction::Next); - if prev != next { - Some(TextRange::from_to(prev.range().start(), next.range().end())) - } else { - None - } -} - -fn adj_comments(node: &SyntaxNode, dir: Direction) -> &SyntaxNode { - let mut res = node; - for node in node.siblings(dir) { - match node.kind() { - COMMENT => res = node, - WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (), - _ => break, - } 
- } - res -} - -#[cfg(test)] -mod tests { - use ra_syntax::{SourceFile, AstNode}; - use test_utils::extract_offset; - - use super::*; - - fn do_check(before: &str, afters: &[&str]) { - let (cursor, before) = extract_offset(before); - let file = SourceFile::parse(&before); - let mut range = TextRange::offset_len(cursor, 0.into()); - for &after in afters { - range = extend_selection(file.syntax(), range).unwrap(); - let actual = &before[range]; - assert_eq!(after, actual); - } - } - - #[test] - fn test_extend_selection_arith() { - do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); - } - - #[test] - fn test_extend_selection_start_of_the_lind() { - do_check( - r#" -impl S { -<|> fn foo() { - - } -}"#, - &[" fn foo() {\n\n }\n"], - ); - } - - #[test] - fn test_extend_selection_doc_comments() { - do_check( - r#" -struct A; - -/// bla -/// bla -struct B { - <|> -} - "#, - &[ - "\n \n", - "{\n \n}", - "/// bla\n/// bla\nstruct B {\n \n}", - ], - ) - } - - #[test] - fn test_extend_selection_comments() { - do_check( - r#" -fn bar(){} - -// fn foo() { -// 1 + <|>1 -// } - -// fn foo(){} - "#, - &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], - ); - - do_check( - r#" -// #[derive(Debug, Clone, Copy, PartialEq, Eq)] -// pub enum Direction { -// <|> Next, -// Prev -// } -"#, - &[ - "// Next,", - "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }", - ], - ); - - do_check( - r#" -/* -foo -_bar1<|>*/ - "#, - &["_bar1", "/*\nfoo\n_bar1*/"], - ); - - do_check( - r#" -//!<|>foo_2 bar - "#, - &["foo_2", "//!foo_2 bar"], - ); - - do_check( - r#" -/<|>/foo bar - "#, - &["//foo bar"], - ); - } - - #[test] - fn test_extend_selection_prefer_idents() { - do_check( - r#" -fn main() { foo<|>+bar;} - "#, - &["foo", "foo+bar"], - ); - do_check( - r#" -fn main() { foo+<|>bar;} - "#, - &["bar", "foo+bar"], - ); - } - - #[test] - fn test_extend_selection_prefer_lifetimes() { - do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); - 
do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); - } - - #[test] - fn test_extend_selection_select_first_word() { - do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); - do_check( - r#" -impl S { - fn foo() { - // hel<|>lo world - } -} - "#, - &["hello", "// hello world"], - ); - } - - #[test] - fn test_extend_selection_string() { - do_check( - r#" -fn bar(){} - -" fn f<|>oo() {" - "#, - &["foo", "\" fn foo() {\""], - ); - } -} diff --git a/crates/ra_editor/src/folding_ranges.rs b/crates/ra_editor/src/folding_ranges.rs deleted file mode 100644 index 6f3106889..000000000 --- a/crates/ra_editor/src/folding_ranges.rs +++ /dev/null @@ -1,297 +0,0 @@ -use rustc_hash::FxHashSet; - -use ra_syntax::{ - ast, AstNode, Direction, SourceFile, SyntaxNode, TextRange, - SyntaxKind::{self, *}, -}; - -#[derive(Debug, PartialEq, Eq)] -pub enum FoldKind { - Comment, - Imports, - Block, -} - -#[derive(Debug)] -pub struct Fold { - pub range: TextRange, - pub kind: FoldKind, -} - -pub fn folding_ranges(file: &SourceFile) -> Vec { - let mut res = vec![]; - let mut visited_comments = FxHashSet::default(); - let mut visited_imports = FxHashSet::default(); - - for node in file.syntax().descendants() { - // Fold items that span multiple lines - if let Some(kind) = fold_kind(node.kind()) { - if has_newline(node) { - res.push(Fold { - range: node.range(), - kind, - }); - } - } - - // Fold groups of comments - if node.kind() == COMMENT && !visited_comments.contains(&node) { - if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) { - res.push(Fold { - range, - kind: FoldKind::Comment, - }) - } - } - - // Fold groups of imports - if node.kind() == USE_ITEM && !visited_imports.contains(&node) { - if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { - res.push(Fold { - range, - kind: FoldKind::Imports, - }) - } - } - } - - res -} - -fn fold_kind(kind: SyntaxKind) -> Option { - match kind { - COMMENT => Some(FoldKind::Comment), 
- USE_ITEM => Some(FoldKind::Imports), - NAMED_FIELD_DEF_LIST | FIELD_PAT_LIST | ITEM_LIST | EXTERN_ITEM_LIST | USE_TREE_LIST - | BLOCK | ENUM_VARIANT_LIST => Some(FoldKind::Block), - _ => None, - } -} - -fn has_newline(node: &SyntaxNode) -> bool { - for descendant in node.descendants() { - if let Some(ws) = ast::Whitespace::cast(descendant) { - if ws.has_newlines() { - return true; - } - } else if let Some(comment) = ast::Comment::cast(descendant) { - if comment.has_newlines() { - return true; - } - } - } - - false -} - -fn contiguous_range_for_group<'a>( - first: &'a SyntaxNode, - visited: &mut FxHashSet<&'a SyntaxNode>, -) -> Option { - visited.insert(first); - - let mut last = first; - for node in first.siblings(Direction::Next) { - if let Some(ws) = ast::Whitespace::cast(node) { - // There is a blank line, which means that the group ends here - if ws.count_newlines_lazy().take(2).count() == 2 { - break; - } - - // Ignore whitespace without blank lines - continue; - } - - // Stop if we find a node that doesn't belong to the group - if node.kind() != first.kind() { - break; - } - - visited.insert(node); - last = node; - } - - if first != last { - Some(TextRange::from_to( - first.range().start(), - last.range().end(), - )) - } else { - // The group consists of only one element, therefore it cannot be folded - None - } -} - -fn contiguous_range_for_comment<'a>( - first: &'a SyntaxNode, - visited: &mut FxHashSet<&'a SyntaxNode>, -) -> Option { - visited.insert(first); - - // Only fold comments of the same flavor - let group_flavor = ast::Comment::cast(first)?.flavor(); - - let mut last = first; - for node in first.siblings(Direction::Next) { - if let Some(ws) = ast::Whitespace::cast(node) { - // There is a blank line, which means the group ends here - if ws.count_newlines_lazy().take(2).count() == 2 { - break; - } - - // Ignore whitespace without blank lines - continue; - } - - match ast::Comment::cast(node) { - Some(next_comment) if next_comment.flavor() == 
group_flavor => { - visited.insert(node); - last = node; - } - // The comment group ends because either: - // * An element of a different kind was reached - // * A comment of a different flavor was reached - _ => break, - } - } - - if first != last { - Some(TextRange::from_to( - first.range().start(), - last.range().end(), - )) - } else { - // The group consists of only one element, therefore it cannot be folded - None - } -} - -#[cfg(test)] -mod tests { - use super::*; - use test_utils::extract_ranges; - - fn do_check(text: &str, fold_kinds: &[FoldKind]) { - let (ranges, text) = extract_ranges(text, "fold"); - let file = SourceFile::parse(&text); - let folds = folding_ranges(&file); - - assert_eq!( - folds.len(), - ranges.len(), - "The amount of folds is different than the expected amount" - ); - assert_eq!( - folds.len(), - fold_kinds.len(), - "The amount of fold kinds is different than the expected amount" - ); - for ((fold, range), fold_kind) in folds - .into_iter() - .zip(ranges.into_iter()) - .zip(fold_kinds.into_iter()) - { - assert_eq!(fold.range.start(), range.start()); - assert_eq!(fold.range.end(), range.end()); - assert_eq!(&fold.kind, fold_kind); - } - } - - #[test] - fn test_fold_comments() { - let text = r#" -// Hello -// this is a multiline -// comment -// - -// But this is not - -fn main() { - // We should - // also - // fold - // this one. - //! But this one is different - //! 
because it has another flavor - /* As does this - multiline comment */ -}"#; - - let fold_kinds = &[ - FoldKind::Comment, - FoldKind::Block, - FoldKind::Comment, - FoldKind::Comment, - FoldKind::Comment, - ]; - do_check(text, fold_kinds); - } - - #[test] - fn test_fold_imports() { - let text = r#" -use std::{ - str, - vec, - io as iop -}; - -fn main() { -}"#; - - let folds = &[FoldKind::Imports, FoldKind::Block, FoldKind::Block]; - do_check(text, folds); - } - - #[test] - fn test_fold_import_groups() { - let text = r#" -use std::str; -use std::vec; -use std::io as iop; - -use std::mem; -use std::f64; - -use std::collections::HashMap; -// Some random comment -use std::collections::VecDeque; - -fn main() { -}"#; - - let folds = &[FoldKind::Imports, FoldKind::Imports, FoldKind::Block]; - do_check(text, folds); - } - - #[test] - fn test_fold_import_and_groups() { - let text = r#" -use std::str; -use std::vec; -use std::io as iop; - -use std::mem; -use std::f64; - -use std::collections::{ - HashMap, - VecDeque, -}; -// Some random comment - -fn main() { -}"#; - - let folds = &[ - FoldKind::Imports, - FoldKind::Imports, - FoldKind::Imports, - FoldKind::Block, - FoldKind::Block, - ]; - do_check(text, folds); - } - -} diff --git a/crates/ra_editor/src/lib.rs b/crates/ra_editor/src/lib.rs deleted file mode 100644 index 5a6af19b7..000000000 --- a/crates/ra_editor/src/lib.rs +++ /dev/null @@ -1,168 +0,0 @@ -pub mod assists; -mod extend_selection; -mod folding_ranges; -mod line_index; -mod line_index_utils; -mod structure; -#[cfg(test)] -mod test_utils; -mod typing; -mod diagnostics; - -pub use self::{ - assists::LocalEdit, - extend_selection::extend_selection, - folding_ranges::{folding_ranges, Fold, FoldKind}, - line_index::{LineCol, LineIndex}, - line_index_utils::translate_offset_with_edit, - structure::{file_structure, StructureNode}, - typing::{join_lines, on_enter, on_dot_typed, on_eq_typed}, - diagnostics::diagnostics -}; -use ra_text_edit::TextEditBuilder; -use 
ra_syntax::{ - SourceFile, SyntaxNode, TextRange, TextUnit, Direction, - SyntaxKind::{self, *}, - ast::{self, AstNode}, - algo::find_leaf_at_offset, -}; -use rustc_hash::FxHashSet; - -#[derive(Debug)] -pub struct HighlightedRange { - pub range: TextRange, - pub tag: &'static str, -} - -#[derive(Debug, Copy, Clone)] -pub enum Severity { - Error, - WeakWarning, -} - -#[derive(Debug)] -pub struct Diagnostic { - pub range: TextRange, - pub msg: String, - pub severity: Severity, - pub fix: Option, -} - -pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { - const BRACES: &[SyntaxKind] = &[ - L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE, - ]; - let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset) - .filter_map(|node| { - let idx = BRACES.iter().position(|&brace| brace == node.kind())?; - Some((node, idx)) - }) - .next()?; - let parent = brace_node.parent()?; - let matching_kind = BRACES[brace_idx ^ 1]; - let matching_node = parent - .children() - .find(|node| node.kind() == matching_kind)?; - Some(matching_node.range().start()) -} - -pub fn highlight(root: &SyntaxNode) -> Vec { - // Visited nodes to handle highlighting priorities - let mut highlighted = FxHashSet::default(); - let mut res = Vec::new(); - for node in root.descendants() { - if highlighted.contains(&node) { - continue; - } - let tag = match node.kind() { - COMMENT => "comment", - STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string", - ATTR => "attribute", - NAME_REF => "text", - NAME => "function", - INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", - LIFETIME => "parameter", - k if k.is_keyword() => "keyword", - _ => { - if let Some(macro_call) = ast::MacroCall::cast(node) { - if let Some(path) = macro_call.path() { - if let Some(segment) = path.segment() { - if let Some(name_ref) = segment.name_ref() { - highlighted.insert(name_ref.syntax()); - let range_start = name_ref.syntax().range().start(); - let mut range_end = 
name_ref.syntax().range().end(); - for sibling in path.syntax().siblings(Direction::Next) { - match sibling.kind() { - EXCL | IDENT => range_end = sibling.range().end(), - _ => (), - } - } - res.push(HighlightedRange { - range: TextRange::from_to(range_start, range_end), - tag: "macro", - }) - } - } - } - } - continue; - } - }; - res.push(HighlightedRange { - range: node.range(), - tag, - }) - } - res -} - -pub fn syntax_tree(file: &SourceFile) -> String { - ::ra_syntax::utils::dump_tree(file.syntax()) -} - -#[cfg(test)] -mod tests { - use ra_syntax::AstNode; - - use crate::test_utils::{add_cursor, assert_eq_dbg, assert_eq_text, extract_offset}; - - use super::*; - - #[test] - fn test_highlighting() { - let file = SourceFile::parse( - r#" -// comment -fn main() {} - println!("Hello, {}!", 92); -"#, - ); - let hls = highlight(file.syntax()); - assert_eq_dbg( - r#"[HighlightedRange { range: [1; 11), tag: "comment" }, - HighlightedRange { range: [12; 14), tag: "keyword" }, - HighlightedRange { range: [15; 19), tag: "function" }, - HighlightedRange { range: [29; 37), tag: "macro" }, - HighlightedRange { range: [38; 50), tag: "string" }, - HighlightedRange { range: [52; 54), tag: "literal" }]"#, - &hls, - ); - } - - #[test] - fn test_matching_brace() { - fn do_check(before: &str, after: &str) { - let (pos, before) = extract_offset(before); - let file = SourceFile::parse(&before); - let new_pos = match matching_brace(&file, pos) { - None => pos, - Some(pos) => pos, - }; - let actual = add_cursor(&before, new_pos); - assert_eq_text!(after, &actual); - } - - do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); - } - -} diff --git a/crates/ra_editor/src/line_index.rs b/crates/ra_editor/src/line_index.rs deleted file mode 100644 index 898fee7e0..000000000 --- a/crates/ra_editor/src/line_index.rs +++ /dev/null @@ -1,399 +0,0 @@ -use crate::TextUnit; -use rustc_hash::FxHashMap; -use superslice::Ext; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct LineIndex 
{ - pub(crate) newlines: Vec, - pub(crate) utf16_lines: FxHashMap>, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct LineCol { - pub line: u32, - pub col_utf16: u32, -} - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub(crate) struct Utf16Char { - pub(crate) start: TextUnit, - pub(crate) end: TextUnit, -} - -impl Utf16Char { - fn len(&self) -> TextUnit { - self.end - self.start - } -} - -impl LineIndex { - pub fn new(text: &str) -> LineIndex { - let mut utf16_lines = FxHashMap::default(); - let mut utf16_chars = Vec::new(); - - let mut newlines = vec![0.into()]; - let mut curr_row = 0.into(); - let mut curr_col = 0.into(); - let mut line = 0; - for c in text.chars() { - curr_row += TextUnit::of_char(c); - if c == '\n' { - newlines.push(curr_row); - - // Save any utf-16 characters seen in the previous line - if utf16_chars.len() > 0 { - utf16_lines.insert(line, utf16_chars); - utf16_chars = Vec::new(); - } - - // Prepare for processing the next line - curr_col = 0.into(); - line += 1; - continue; - } - - let char_len = TextUnit::of_char(c); - if char_len.to_usize() > 1 { - utf16_chars.push(Utf16Char { - start: curr_col, - end: curr_col + char_len, - }); - } - - curr_col += char_len; - } - - // Save any utf-16 characters seen in the last line - if utf16_chars.len() > 0 { - utf16_lines.insert(line, utf16_chars); - } - - LineIndex { - newlines, - utf16_lines, - } - } - - pub fn line_col(&self, offset: TextUnit) -> LineCol { - let line = self.newlines.upper_bound(&offset) - 1; - let line_start_offset = self.newlines[line]; - let col = offset - line_start_offset; - - LineCol { - line: line as u32, - col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32, - } - } - - pub fn offset(&self, line_col: LineCol) -> TextUnit { - //TODO: return Result - let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); - self.newlines[line_col.line as usize] + col - } - - fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize { - if let 
Some(utf16_chars) = self.utf16_lines.get(&line) { - let mut correction = TextUnit::from_usize(0); - for c in utf16_chars { - if col >= c.end { - correction += c.len() - TextUnit::from_usize(1); - } else { - // From here on, all utf16 characters come *after* the character we are mapping, - // so we don't need to take them into account - break; - } - } - - col -= correction; - } - - col.to_usize() - } - - fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit { - let mut col: TextUnit = col.into(); - if let Some(utf16_chars) = self.utf16_lines.get(&line) { - for c in utf16_chars { - if col >= c.start { - col += c.len() - TextUnit::from_usize(1); - } else { - // From here on, all utf16 characters come *after* the character we are mapping, - // so we don't need to take them into account - break; - } - } - } - - col - } -} - -#[cfg(test)] -/// Simple reference implementation to use in proptests -pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol { - let mut res = LineCol { - line: 0, - col_utf16: 0, - }; - for (i, c) in text.char_indices() { - if i + c.len_utf8() > offset.to_usize() { - // if it's an invalid offset, inside a multibyte char - // return as if it was at the start of the char - break; - } - if c == '\n' { - res.line += 1; - res.col_utf16 = 0; - } else { - res.col_utf16 += 1; - } - } - res -} - -#[cfg(test)] -mod test_line_index { - use super::*; - use proptest::{prelude::*, proptest, proptest_helper}; - use ra_text_edit::test_utils::{arb_text, arb_offset}; - - #[test] - fn test_line_index() { - let text = "hello\nworld"; - let index = LineIndex::new(text); - assert_eq!( - index.line_col(0.into()), - LineCol { - line: 0, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(1.into()), - LineCol { - line: 0, - col_utf16: 1 - } - ); - assert_eq!( - index.line_col(5.into()), - LineCol { - line: 0, - col_utf16: 5 - } - ); - assert_eq!( - index.line_col(6.into()), - LineCol { - line: 1, - col_utf16: 0 - } - ); - assert_eq!( - 
index.line_col(7.into()), - LineCol { - line: 1, - col_utf16: 1 - } - ); - assert_eq!( - index.line_col(8.into()), - LineCol { - line: 1, - col_utf16: 2 - } - ); - assert_eq!( - index.line_col(10.into()), - LineCol { - line: 1, - col_utf16: 4 - } - ); - assert_eq!( - index.line_col(11.into()), - LineCol { - line: 1, - col_utf16: 5 - } - ); - assert_eq!( - index.line_col(12.into()), - LineCol { - line: 1, - col_utf16: 6 - } - ); - - let text = "\nhello\nworld"; - let index = LineIndex::new(text); - assert_eq!( - index.line_col(0.into()), - LineCol { - line: 0, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(1.into()), - LineCol { - line: 1, - col_utf16: 0 - } - ); - assert_eq!( - index.line_col(2.into()), - LineCol { - line: 1, - col_utf16: 1 - } - ); - assert_eq!( - index.line_col(6.into()), - LineCol { - line: 1, - col_utf16: 5 - } - ); - assert_eq!( - index.line_col(7.into()), - LineCol { - line: 2, - col_utf16: 0 - } - ); - } - - fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> { - arb_text() - .prop_flat_map(|text| (arb_offset(&text), Just(text))) - .boxed() - } - - fn to_line_col(text: &str, offset: TextUnit) -> LineCol { - let mut res = LineCol { - line: 0, - col_utf16: 0, - }; - for (i, c) in text.char_indices() { - if i + c.len_utf8() > offset.to_usize() { - // if it's an invalid offset, inside a multibyte char - // return as if it was at the start of the char - break; - } - if c == '\n' { - res.line += 1; - res.col_utf16 = 0; - } else { - res.col_utf16 += 1; - } - } - res - } - - proptest! 
{ - #[test] - fn test_line_index_proptest((offset, text) in arb_text_with_offset()) { - let expected = to_line_col(&text, offset); - let line_index = LineIndex::new(&text); - let actual = line_index.line_col(offset); - - assert_eq!(actual, expected); - } - } -} - -#[cfg(test)] -mod test_utf8_utf16_conv { - use super::*; - - #[test] - fn test_char_len() { - assert_eq!('メ'.len_utf8(), 3); - assert_eq!('メ'.len_utf16(), 1); - } - - #[test] - fn test_empty_index() { - let col_index = LineIndex::new( - " -const C: char = 'x'; -", - ); - assert_eq!(col_index.utf16_lines.len(), 0); - } - - #[test] - fn test_single_char() { - let col_index = LineIndex::new( - " -const C: char = 'メ'; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 1); - assert_eq!( - col_index.utf16_lines[&1][0], - Utf16Char { - start: 17.into(), - end: 20.into() - } - ); - - // UTF-8 to UTF-16, no changes - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); - - // UTF-16 to UTF-8, no changes - assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15)); - - // UTF-16 to UTF-8 - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21)); - } - - #[test] - fn test_string() { - let col_index = LineIndex::new( - " -const C: char = \"メ メ\"; -", - ); - - assert_eq!(col_index.utf16_lines.len(), 1); - assert_eq!(col_index.utf16_lines[&1].len(), 2); - assert_eq!( - col_index.utf16_lines[&1][0], - Utf16Char { - start: 17.into(), - end: 20.into() - } - ); - assert_eq!( - col_index.utf16_lines[&1][1], - Utf16Char { - start: 21.into(), - end: 24.into() - } - ); - - // UTF-8 to UTF-16 - assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); - - assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); - assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); - - assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); - - // UTF-16 to UTF-8 - 
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15)); - - assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20)); - assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23)); - - assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15)); - } - -} diff --git a/crates/ra_editor/src/line_index_utils.rs b/crates/ra_editor/src/line_index_utils.rs deleted file mode 100644 index ec3269bbb..000000000 --- a/crates/ra_editor/src/line_index_utils.rs +++ /dev/null @@ -1,363 +0,0 @@ -use ra_text_edit::{AtomTextEdit, TextEdit}; -use ra_syntax::{TextUnit, TextRange}; -use crate::{LineIndex, LineCol, line_index::Utf16Char}; - -#[derive(Debug, Clone)] -enum Step { - Newline(TextUnit), - Utf16Char(TextRange), -} - -#[derive(Debug)] -struct LineIndexStepIter<'a> { - line_index: &'a LineIndex, - next_newline_idx: usize, - utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, -} - -impl<'a> LineIndexStepIter<'a> { - fn from(line_index: &LineIndex) -> LineIndexStepIter { - let mut x = LineIndexStepIter { - line_index, - next_newline_idx: 0, - utf16_chars: None, - }; - // skip first newline since it's not real - x.next(); - x - } -} - -impl<'a> Iterator for LineIndexStepIter<'a> { - type Item = Step; - fn next(&mut self) -> Option { - self.utf16_chars - .as_mut() - .and_then(|(newline, x)| { - let x = x.next()?; - Some(Step::Utf16Char(TextRange::from_to( - *newline + x.start, - *newline + x.end, - ))) - }) - .or_else(|| { - let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?; - self.utf16_chars = self - .line_index - .utf16_lines - .get(&(self.next_newline_idx as u32)) - .map(|x| (next_newline, x.iter())); - self.next_newline_idx += 1; - Some(Step::Newline(next_newline)) - }) - } -} - -#[derive(Debug)] -struct OffsetStepIter<'a> { - text: &'a str, - offset: TextUnit, -} - -impl<'a> Iterator for OffsetStepIter<'a> { - type Item = Step; - fn next(&mut self) -> Option { - let (next, 
next_offset) = self - .text - .char_indices() - .filter_map(|(i, c)| { - if c == '\n' { - let next_offset = self.offset + TextUnit::from_usize(i + 1); - let next = Step::Newline(next_offset); - Some((next, next_offset)) - } else { - let char_len = TextUnit::of_char(c); - if char_len.to_usize() > 1 { - let start = self.offset + TextUnit::from_usize(i); - let end = start + char_len; - let next = Step::Utf16Char(TextRange::from_to(start, end)); - let next_offset = end; - Some((next, next_offset)) - } else { - None - } - } - }) - .next()?; - let next_idx = (next_offset - self.offset).to_usize(); - self.text = &self.text[next_idx..]; - self.offset = next_offset; - Some(next) - } -} - -#[derive(Debug)] -enum NextSteps<'a> { - Use, - ReplaceMany(OffsetStepIter<'a>), - AddMany(OffsetStepIter<'a>), -} - -#[derive(Debug)] -struct TranslatedEdit<'a> { - delete: TextRange, - insert: &'a str, - diff: i64, -} - -struct Edits<'a> { - edits: &'a [AtomTextEdit], - current: Option>, - acc_diff: i64, -} - -impl<'a> Edits<'a> { - fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> { - let mut x = Edits { - edits: text_edit.as_atoms(), - current: None, - acc_diff: 0, - }; - x.advance_edit(); - x - } - fn advance_edit(&mut self) { - self.acc_diff += self.current.as_ref().map_or(0, |x| x.diff); - match self.edits.split_first() { - Some((next, rest)) => { - let delete = self.translate_range(next.delete); - let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64; - self.current = Some(TranslatedEdit { - delete, - insert: &next.insert, - diff, - }); - self.edits = rest; - } - None => { - self.current = None; - } - } - } - - fn next_inserted_steps(&mut self) -> Option> { - let cur = self.current.as_ref()?; - let res = Some(OffsetStepIter { - offset: cur.delete.start(), - text: &cur.insert, - }); - self.advance_edit(); - res - } - - fn next_steps(&mut self, step: &Step) -> NextSteps { - let step_pos = match step { - &Step::Newline(n) => n, - &Step::Utf16Char(r) => 
r.end(), - }; - let res = match &mut self.current { - Some(edit) => { - if step_pos <= edit.delete.start() { - NextSteps::Use - } else if step_pos <= edit.delete.end() { - let iter = OffsetStepIter { - offset: edit.delete.start(), - text: &edit.insert, - }; - // empty slice to avoid returning steps again - edit.insert = &edit.insert[edit.insert.len()..]; - NextSteps::ReplaceMany(iter) - } else { - let iter = OffsetStepIter { - offset: edit.delete.start(), - text: &edit.insert, - }; - // empty slice to avoid returning steps again - edit.insert = &edit.insert[edit.insert.len()..]; - self.advance_edit(); - NextSteps::AddMany(iter) - } - } - None => NextSteps::Use, - }; - res - } - - fn translate_range(&self, range: TextRange) -> TextRange { - if self.acc_diff == 0 { - range - } else { - let start = self.translate(range.start()); - let end = self.translate(range.end()); - TextRange::from_to(start, end) - } - } - - fn translate(&self, x: TextUnit) -> TextUnit { - if self.acc_diff == 0 { - x - } else { - TextUnit::from((x.to_usize() as i64 + self.acc_diff) as u32) - } - } - - fn translate_step(&self, x: &Step) -> Step { - if self.acc_diff == 0 { - x.clone() - } else { - match x { - &Step::Newline(n) => Step::Newline(self.translate(n)), - &Step::Utf16Char(r) => Step::Utf16Char(self.translate_range(r)), - } - } - } -} - -#[derive(Debug)] -struct RunningLineCol { - line: u32, - last_newline: TextUnit, - col_adjust: TextUnit, -} - -impl RunningLineCol { - fn new() -> RunningLineCol { - RunningLineCol { - line: 0, - last_newline: TextUnit::from(0), - col_adjust: TextUnit::from(0), - } - } - - fn to_line_col(&self, offset: TextUnit) -> LineCol { - LineCol { - line: self.line, - col_utf16: ((offset - self.last_newline) - self.col_adjust).into(), - } - } - - fn add_line(&mut self, newline: TextUnit) { - self.line += 1; - self.last_newline = newline; - self.col_adjust = TextUnit::from(0); - } - - fn adjust_col(&mut self, range: &TextRange) { - self.col_adjust += range.len() - 
TextUnit::from(1); - } -} - -pub fn translate_offset_with_edit( - line_index: &LineIndex, - offset: TextUnit, - text_edit: &TextEdit, -) -> LineCol { - let mut state = Edits::from_text_edit(&text_edit); - - let mut res = RunningLineCol::new(); - - macro_rules! test_step { - ($x:ident) => { - match &$x { - Step::Newline(n) => { - if offset < *n { - return res.to_line_col(offset); - } else { - res.add_line(*n); - } - } - Step::Utf16Char(x) => { - if offset < x.end() { - // if the offset is inside a multibyte char it's invalid - // clamp it to the start of the char - let clamp = offset.min(x.start()); - return res.to_line_col(clamp); - } else { - res.adjust_col(x); - } - } - } - }; - } - - for orig_step in LineIndexStepIter::from(line_index) { - loop { - let translated_step = state.translate_step(&orig_step); - match state.next_steps(&translated_step) { - NextSteps::Use => { - test_step!(translated_step); - break; - } - NextSteps::ReplaceMany(ns) => { - for n in ns { - test_step!(n); - } - break; - } - NextSteps::AddMany(ns) => { - for n in ns { - test_step!(n); - } - } - } - } - } - - loop { - match state.next_inserted_steps() { - None => break, - Some(ns) => { - for n in ns { - test_step!(n); - } - } - } - } - - res.to_line_col(offset) -} - -#[cfg(test)] -mod test { - use super::*; - use proptest::{prelude::*, proptest, proptest_helper}; - use crate::line_index; - use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit}; - use ra_text_edit::TextEdit; - - #[derive(Debug)] - struct ArbTextWithEditAndOffset { - text: String, - edit: TextEdit, - edited_text: String, - offset: TextUnit, - } - - fn arb_text_with_edit_and_offset() -> BoxedStrategy { - arb_text_with_edit() - .prop_flat_map(|x| { - let edited_text = x.edit.apply(&x.text); - let arb_offset = arb_offset(&edited_text); - (Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| { - ArbTextWithEditAndOffset { - text: x.text, - edit: x.edit, - edited_text, - offset, - } - }) - }) - 
.boxed() - } - - proptest! { - #[test] - fn test_translate_offset_with_edit(x in arb_text_with_edit_and_offset()) { - let expected = line_index::to_line_col(&x.edited_text, x.offset); - let line_index = LineIndex::new(&x.text); - let actual = translate_offset_with_edit(&line_index, x.offset, &x.edit); - - assert_eq!(actual, expected); - } - } -} diff --git a/crates/ra_editor/src/structure.rs b/crates/ra_editor/src/structure.rs deleted file mode 100644 index 8bd57555f..000000000 --- a/crates/ra_editor/src/structure.rs +++ /dev/null @@ -1,129 +0,0 @@ -use crate::TextRange; - -use ra_syntax::{ - algo::visit::{visitor, Visitor}, - ast::{self, NameOwner}, - AstNode, SourceFile, SyntaxKind, SyntaxNode, WalkEvent, -}; - -#[derive(Debug, Clone)] -pub struct StructureNode { - pub parent: Option, - pub label: String, - pub navigation_range: TextRange, - pub node_range: TextRange, - pub kind: SyntaxKind, -} - -pub fn file_structure(file: &SourceFile) -> Vec { - let mut res = Vec::new(); - let mut stack = Vec::new(); - - for event in file.syntax().preorder() { - match event { - WalkEvent::Enter(node) => { - if let Some(mut symbol) = structure_node(node) { - symbol.parent = stack.last().map(|&n| n); - stack.push(res.len()); - res.push(symbol); - } - } - WalkEvent::Leave(node) => { - if structure_node(node).is_some() { - stack.pop().unwrap(); - } - } - } - } - res -} - -fn structure_node(node: &SyntaxNode) -> Option { - fn decl(node: &N) -> Option { - let name = node.name()?; - Some(StructureNode { - parent: None, - label: name.text().to_string(), - navigation_range: name.syntax().range(), - node_range: node.syntax().range(), - kind: node.syntax().kind(), - }) - } - - visitor() - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(|im: &ast::ImplBlock| { - let target_type = im.target_type()?; - let target_trait = im.target_trait(); - let label = match target_trait { - None 
=> format!("impl {}", target_type.syntax().text()), - Some(t) => format!( - "impl {} for {}", - t.syntax().text(), - target_type.syntax().text(), - ), - }; - - let node = StructureNode { - parent: None, - label, - navigation_range: target_type.syntax().range(), - node_range: im.syntax().range(), - kind: im.syntax().kind(), - }; - Some(node) - }) - .accept(node)? -} - -#[cfg(test)] -mod tests { - use super::*; - use test_utils::assert_eq_dbg; - - #[test] - fn test_file_structure() { - let file = SourceFile::parse( - r#" -struct Foo { - x: i32 -} - -mod m { - fn bar() {} -} - -enum E { X, Y(i32) } -type T = (); -static S: i32 = 92; -const C: i32 = 92; - -impl E {} - -impl fmt::Debug for E {} -"#, - ); - let structure = file_structure(&file); - assert_eq_dbg( - r#"[StructureNode { parent: None, label: "Foo", navigation_range: [8; 11), node_range: [1; 26), kind: STRUCT_DEF }, - StructureNode { parent: Some(0), label: "x", navigation_range: [18; 19), node_range: [18; 24), kind: NAMED_FIELD_DEF }, - StructureNode { parent: None, label: "m", navigation_range: [32; 33), node_range: [28; 53), kind: MODULE }, - StructureNode { parent: Some(2), label: "bar", navigation_range: [43; 46), node_range: [40; 51), kind: FN_DEF }, - StructureNode { parent: None, label: "E", navigation_range: [60; 61), node_range: [55; 75), kind: ENUM_DEF }, - StructureNode { parent: None, label: "T", navigation_range: [81; 82), node_range: [76; 88), kind: TYPE_DEF }, - StructureNode { parent: None, label: "S", navigation_range: [96; 97), node_range: [89; 108), kind: STATIC_DEF }, - StructureNode { parent: None, label: "C", navigation_range: [115; 116), node_range: [109; 127), kind: CONST_DEF }, - StructureNode { parent: None, label: "impl E", navigation_range: [134; 135), node_range: [129; 138), kind: IMPL_BLOCK }, - StructureNode { parent: None, label: "impl fmt::Debug for E", navigation_range: [160; 161), node_range: [140; 164), kind: IMPL_BLOCK }]"#, - &structure, - ) - } -} diff --git 
a/crates/ra_editor/src/test_utils.rs b/crates/ra_editor/src/test_utils.rs deleted file mode 100644 index dc2470aa3..000000000 --- a/crates/ra_editor/src/test_utils.rs +++ /dev/null @@ -1,41 +0,0 @@ -use ra_syntax::{SourceFile, TextRange, TextUnit}; - -use crate::LocalEdit; -pub use test_utils::*; - -pub fn check_action Option>( - before: &str, - after: &str, - f: F, -) { - let (before_cursor_pos, before) = extract_offset(before); - let file = SourceFile::parse(&before); - let result = f(&file, before_cursor_pos).expect("code action is not applicable"); - let actual = result.edit.apply(&before); - let actual_cursor_pos = match result.cursor_position { - None => result - .edit - .apply_to_offset(before_cursor_pos) - .expect("cursor position is affected by the edit"), - Some(off) => off, - }; - let actual = add_cursor(&actual, actual_cursor_pos); - assert_eq_text!(after, &actual); -} - -pub fn check_action_range Option>( - before: &str, - after: &str, - f: F, -) { - let (range, before) = extract_range(before); - let file = SourceFile::parse(&before); - let result = f(&file, range).expect("code action is not applicable"); - let actual = result.edit.apply(&before); - let actual_cursor_pos = match result.cursor_position { - None => result.edit.apply_to_offset(range.start()).unwrap(), - Some(off) => off, - }; - let actual = add_cursor(&actual, actual_cursor_pos); - assert_eq_text!(after, &actual); -} diff --git a/crates/ra_editor/src/typing.rs b/crates/ra_editor/src/typing.rs deleted file mode 100644 index d8177f245..000000000 --- a/crates/ra_editor/src/typing.rs +++ /dev/null @@ -1,826 +0,0 @@ -use std::mem; - -use itertools::Itertools; -use ra_syntax::{ - algo::{find_node_at_offset, find_covering_node, find_leaf_at_offset, LeafAtOffset}, - ast, - AstNode, Direction, SourceFile, SyntaxKind, - SyntaxKind::*, - SyntaxNode, TextRange, TextUnit, -}; - -use crate::{LocalEdit, TextEditBuilder}; - -pub fn join_lines(file: &SourceFile, range: TextRange) -> LocalEdit { - let 
range = if range.is_empty() { - let syntax = file.syntax(); - let text = syntax.text().slice(range.start()..); - let pos = match text.find('\n') { - None => { - return LocalEdit { - label: "join lines".to_string(), - edit: TextEditBuilder::default().finish(), - cursor_position: None, - }; - } - Some(pos) => pos, - }; - TextRange::offset_len(range.start() + pos, TextUnit::of_char('\n')) - } else { - range - }; - - let node = find_covering_node(file.syntax(), range); - let mut edit = TextEditBuilder::default(); - for node in node.descendants() { - let text = match node.leaf_text() { - Some(text) => text, - None => continue, - }; - let range = match range.intersection(&node.range()) { - Some(range) => range, - None => continue, - } - node.range().start(); - for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { - let pos: TextUnit = (pos as u32).into(); - let off = node.range().start() + range.start() + pos; - if !edit.invalidates_offset(off) { - remove_newline(&mut edit, node, text.as_str(), off); - } - } - } - - LocalEdit { - label: "join lines".to_string(), - edit: edit.finish(), - cursor_position: None, - } -} - -pub fn on_enter(file: &SourceFile, offset: TextUnit) -> Option { - let comment = find_leaf_at_offset(file.syntax(), offset) - .left_biased() - .and_then(ast::Comment::cast)?; - - if let ast::CommentFlavor::Multiline = comment.flavor() { - return None; - } - - let prefix = comment.prefix(); - if offset < comment.syntax().range().start() + TextUnit::of_str(prefix) + TextUnit::from(1) { - return None; - } - - let indent = node_indent(file, comment.syntax())?; - let inserted = format!("\n{}{} ", indent, prefix); - let cursor_position = offset + TextUnit::of_str(&inserted); - let mut edit = TextEditBuilder::default(); - edit.insert(offset, inserted); - Some(LocalEdit { - label: "on enter".to_string(), - edit: edit.finish(), - cursor_position: Some(cursor_position), - }) -} - -fn node_indent<'a>(file: &'a SourceFile, node: &SyntaxNode) 
-> Option<&'a str> { - let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) { - LeafAtOffset::Between(l, r) => { - assert!(r == node); - l - } - LeafAtOffset::Single(n) => { - assert!(n == node); - return Some(""); - } - LeafAtOffset::None => unreachable!(), - }; - if ws.kind() != WHITESPACE { - return None; - } - let text = ws.leaf_text().unwrap(); - let pos = text.as_str().rfind('\n').map(|it| it + 1).unwrap_or(0); - Some(&text[pos..]) -} - -pub fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option { - let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; - if let_stmt.has_semi() { - return None; - } - if let Some(expr) = let_stmt.initializer() { - let expr_range = expr.syntax().range(); - if expr_range.contains(offset) && offset != expr_range.start() { - return None; - } - if file - .syntax() - .text() - .slice(offset..expr_range.start()) - .contains('\n') - { - return None; - } - } else { - return None; - } - let offset = let_stmt.syntax().range().end(); - let mut edit = TextEditBuilder::default(); - edit.insert(offset, ";".to_string()); - Some(LocalEdit { - label: "add semicolon".to_string(), - edit: edit.finish(), - cursor_position: None, - }) -} - -pub fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option { - let before_dot_offset = offset - TextUnit::of_char('.'); - - let whitespace = find_leaf_at_offset(file.syntax(), before_dot_offset).left_biased()?; - - // find whitespace just left of the dot - ast::Whitespace::cast(whitespace)?; - - // make sure there is a method call - let method_call = whitespace - .siblings(Direction::Prev) - // first is whitespace - .skip(1) - .next()?; - - ast::MethodCallExpr::cast(method_call)?; - - // find how much the _method call is indented - let method_chain_indent = method_call - .parent()? - .siblings(Direction::Prev) - .skip(1) - .next()? 
- .leaf_text() - .map(|x| last_line_indent_in_whitespace(x))?; - - let current_indent = TextUnit::of_str(last_line_indent_in_whitespace(whitespace.leaf_text()?)); - // TODO: indent is always 4 spaces now. A better heuristic could look on the previous line(s) - - let target_indent = TextUnit::of_str(method_chain_indent) + TextUnit::from_usize(4); - - let diff = target_indent - current_indent; - - let indent = "".repeat(diff.to_usize()); - - let cursor_position = offset + diff; - let mut edit = TextEditBuilder::default(); - edit.insert(before_dot_offset, indent); - Some(LocalEdit { - label: "indent dot".to_string(), - edit: edit.finish(), - cursor_position: Some(cursor_position), - }) -} - -/// Finds the last line in the whitespace -fn last_line_indent_in_whitespace(ws: &str) -> &str { - ws.split('\n').last().unwrap_or("") -} - -fn remove_newline( - edit: &mut TextEditBuilder, - node: &SyntaxNode, - node_text: &str, - offset: TextUnit, -) { - if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 { - // The node is either the first or the last in the file - let suff = &node_text[TextRange::from_to( - offset - node.range().start() + TextUnit::of_char('\n'), - TextUnit::of_str(node_text), - )]; - let spaces = suff.bytes().take_while(|&b| b == b' ').count(); - - edit.replace( - TextRange::offset_len(offset, ((spaces + 1) as u32).into()), - " ".to_string(), - ); - return; - } - - // Special case that turns something like: - // - // ``` - // my_function({<|> - // - // }) - // ``` - // - // into `my_function()` - if join_single_expr_block(edit, node).is_some() { - return; - } - // ditto for - // - // ``` - // use foo::{<|> - // bar - // }; - // ``` - if join_single_use_tree(edit, node).is_some() { - return; - } - - // The node is between two other nodes - let prev = node.prev_sibling().unwrap(); - let next = node.next_sibling().unwrap(); - if is_trailing_comma(prev.kind(), next.kind()) { - // Removes: trailing comma, newline (incl. 
surrounding whitespace) - edit.delete(TextRange::from_to(prev.range().start(), node.range().end())); - } else if prev.kind() == COMMA && next.kind() == R_CURLY { - // Removes: comma, newline (incl. surrounding whitespace) - let space = if let Some(left) = prev.prev_sibling() { - compute_ws(left, next) - } else { - " " - }; - edit.replace( - TextRange::from_to(prev.range().start(), node.range().end()), - space.to_string(), - ); - } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) { - // Removes: newline (incl. surrounding whitespace), start of the next comment - edit.delete(TextRange::from_to( - node.range().start(), - next.syntax().range().start() + TextUnit::of_str(next.prefix()), - )); - } else { - // Remove newline but add a computed amount of whitespace characters - edit.replace(node.range(), compute_ws(prev, next).to_string()); - } -} - -fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { - match (left, right) { - (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, - _ => false, - } -} - -fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { - let block = ast::Block::cast(node.parent()?)?; - let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; - let expr = single_expr(block)?; - edit.replace( - block_expr.syntax().range(), - expr.syntax().text().to_string(), - ); - Some(()) -} - -fn single_expr(block: &ast::Block) -> Option<&ast::Expr> { - let mut res = None; - for child in block.syntax().children() { - if let Some(expr) = ast::Expr::cast(child) { - if expr.syntax().text().contains('\n') { - return None; - } - if mem::replace(&mut res, Some(expr)).is_some() { - return None; - } - } else { - match child.kind() { - WHITESPACE | L_CURLY | R_CURLY => (), - _ => return None, - } - } - } - res -} - -fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { - let use_tree_list = ast::UseTreeList::cast(node.parent()?)?; - let (tree,) = 
use_tree_list.use_trees().collect_tuple()?; - edit.replace( - use_tree_list.syntax().range(), - tree.syntax().text().to_string(), - ); - Some(()) -} - -fn compute_ws(left: &SyntaxNode, right: &SyntaxNode) -> &'static str { - match left.kind() { - L_PAREN | L_BRACK => return "", - L_CURLY => { - if let USE_TREE = right.kind() { - return ""; - } - } - _ => (), - } - match right.kind() { - R_PAREN | R_BRACK => return "", - R_CURLY => { - if let USE_TREE = left.kind() { - return ""; - } - } - DOT => return "", - _ => (), - } - " " -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::test_utils::{ - add_cursor, assert_eq_text, check_action, extract_offset, extract_range, -}; - - fn check_join_lines(before: &str, after: &str) { - check_action(before, after, |file, offset| { - let range = TextRange::offset_len(offset, 0.into()); - let res = join_lines(file, range); - Some(res) - }) - } - - #[test] - fn test_join_lines_comma() { - check_join_lines( - r" -fn foo() { - <|>foo(1, - ) -} -", - r" -fn foo() { - <|>foo(1) -} -", - ); - } - - #[test] - fn test_join_lines_lambda_block() { - check_join_lines( - r" -pub fn reparse(&self, edit: &AtomTextEdit) -> File { - <|>self.incremental_reparse(edit).unwrap_or_else(|| { - self.full_reparse(edit) - }) -} -", - r" -pub fn reparse(&self, edit: &AtomTextEdit) -> File { - <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) -} -", - ); - } - - #[test] - fn test_join_lines_block() { - check_join_lines( - r" -fn foo() { - foo(<|>{ - 92 - }) -}", - r" -fn foo() { - foo(<|>92) -}", - ); - } - - #[test] - fn test_join_lines_use_items_left() { - // No space after the '{' - check_join_lines( - r" -<|>use ra_syntax::{ - TextUnit, TextRange, -};", - r" -<|>use ra_syntax::{TextUnit, TextRange, -};", - ); - } - - #[test] - fn test_join_lines_use_items_right() { - // No space after the '}' - check_join_lines( - r" -use ra_syntax::{ -<|> TextUnit, TextRange -};", - r" -use ra_syntax::{ -<|> TextUnit, TextRange};", - 
); - } - - #[test] - fn test_join_lines_use_items_right_comma() { - // No space after the '}' - check_join_lines( - r" -use ra_syntax::{ -<|> TextUnit, TextRange, -};", - r" -use ra_syntax::{ -<|> TextUnit, TextRange};", - ); - } - - #[test] - fn test_join_lines_use_tree() { - check_join_lines( - r" -use ra_syntax::{ - algo::<|>{ - find_leaf_at_offset, - }, - ast, -};", - r" -use ra_syntax::{ - algo::<|>find_leaf_at_offset, - ast, -};", - ); - } - - #[test] - fn test_join_lines_normal_comments() { - check_join_lines( - r" -fn foo() { - // Hello<|> - // world! -} -", - r" -fn foo() { - // Hello<|> world! -} -", - ); - } - - #[test] - fn test_join_lines_doc_comments() { - check_join_lines( - r" -fn foo() { - /// Hello<|> - /// world! -} -", - r" -fn foo() { - /// Hello<|> world! -} -", - ); - } - - #[test] - fn test_join_lines_mod_comments() { - check_join_lines( - r" -fn foo() { - //! Hello<|> - //! world! -} -", - r" -fn foo() { - //! Hello<|> world! -} -", - ); - } - - #[test] - fn test_join_lines_multiline_comments_1() { - check_join_lines( - r" -fn foo() { - // Hello<|> - /* world! */ -} -", - r" -fn foo() { - // Hello<|> world! */ -} -", - ); - } - - #[test] - fn test_join_lines_multiline_comments_2() { - check_join_lines( - r" -fn foo() { - // The<|> - /* quick - brown - fox! */ -} -", - r" -fn foo() { - // The<|> quick - brown - fox! 
*/ -} -", - ); - } - - fn check_join_lines_sel(before: &str, after: &str) { - let (sel, before) = extract_range(before); - let file = SourceFile::parse(&before); - let result = join_lines(&file, sel); - let actual = result.edit.apply(&before); - assert_eq_text!(after, &actual); - } - - #[test] - fn test_join_lines_selection_fn_args() { - check_join_lines_sel( - r" -fn foo() { - <|>foo(1, - 2, - 3, - <|>) -} - ", - r" -fn foo() { - foo(1, 2, 3) -} - ", - ); - } - - #[test] - fn test_join_lines_selection_struct() { - check_join_lines_sel( - r" -struct Foo <|>{ - f: u32, -}<|> - ", - r" -struct Foo { f: u32 } - ", - ); - } - - #[test] - fn test_join_lines_selection_dot_chain() { - check_join_lines_sel( - r" -fn foo() { - join(<|>type_params.type_params() - .filter_map(|it| it.name()) - .map(|it| it.text())<|>) -}", - r" -fn foo() { - join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) -}", - ); - } - - #[test] - fn test_join_lines_selection_lambda_block_body() { - check_join_lines_sel( - r" -pub fn handle_find_matching_brace() { - params.offsets - .map(|offset| <|>{ - world.analysis().matching_brace(&file, offset).unwrap_or(offset) - }<|>) - .collect(); -}", - r" -pub fn handle_find_matching_brace() { - params.offsets - .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) - .collect(); -}", - ); - } - - #[test] - fn test_on_eq_typed() { - fn do_check(before: &str, after: &str) { - let (offset, before) = extract_offset(before); - let file = SourceFile::parse(&before); - let result = on_eq_typed(&file, offset).unwrap(); - let actual = result.edit.apply(&before); - assert_eq_text!(after, &actual); - } - - // do_check(r" - // fn foo() { - // let foo =<|> - // } - // ", r" - // fn foo() { - // let foo =; - // } - // "); - do_check( - r" -fn foo() { - let foo =<|> 1 + 1 -} -", - r" -fn foo() { - let foo = 1 + 1; -} -", - ); - // do_check(r" - // fn foo() { - // let foo =<|> - // let bar = 1; - // } - // ", r" - // fn 
foo() { - // let foo =; - // let bar = 1; - // } - // "); - } - - #[test] - fn test_on_dot_typed() { - fn do_check(before: &str, after: &str) { - let (offset, before) = extract_offset(before); - let file = SourceFile::parse(&before); - if let Some(result) = on_eq_typed(&file, offset) { - let actual = result.edit.apply(&before); - assert_eq_text!(after, &actual); - }; - } - // indent if continuing chain call - do_check( - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - .<|> - } -", - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - . - } -", - ); - - // do not indent if already indented - do_check( - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - .<|> - } -", - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - . - } -", - ); - - // indent if the previous line is already indented - do_check( - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - .first() - .<|> - } -", - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - .first() - . - } -", - ); - - // don't indent if indent matches previous line - do_check( - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - .first() - .<|> - } -", - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - .first() - . - } -", - ); - - // don't indent if there is no method call on previous line - do_check( - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - .<|> - } -", - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - . 
- } -", - ); - - // indent to match previous expr - do_check( - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) -.<|> - } -", - r" - pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { - self.child_impl(db, name) - . - } -", - ); - } - - #[test] - fn test_on_enter() { - fn apply_on_enter(before: &str) -> Option { - let (offset, before) = extract_offset(before); - let file = SourceFile::parse(&before); - let result = on_enter(&file, offset)?; - let actual = result.edit.apply(&before); - let actual = add_cursor(&actual, result.cursor_position.unwrap()); - Some(actual) - } - - fn do_check(before: &str, after: &str) { - let actual = apply_on_enter(before).unwrap(); - assert_eq_text!(after, &actual); - } - - fn do_check_noop(text: &str) { - assert!(apply_on_enter(text).is_none()) - } - - do_check( - r" -/// Some docs<|> -fn foo() { -} -", - r" -/// Some docs -/// <|> -fn foo() { -} -", - ); - do_check( - r" -impl S { - /// Some<|> docs. - fn foo() {} -} -", - r" -impl S { - /// Some - /// <|> docs. - fn foo() {} -} -", - ); - do_check_noop(r"<|>//! docz"); - } -} diff --git a/crates/ra_ide_api_light/Cargo.toml b/crates/ra_ide_api_light/Cargo.toml new file mode 100644 index 000000000..a97d2308f --- /dev/null +++ b/crates/ra_ide_api_light/Cargo.toml @@ -0,0 +1,19 @@ +[package] +edition = "2018" +name = "ra_ide_api_light" +version = "0.1.0" +authors = ["Aleksey Kladov "] +publish = false + +[dependencies] +itertools = "0.8.0" +superslice = "0.1.0" +join_to_string = "0.1.1" +rustc-hash = "1.0" + +ra_syntax = { path = "../ra_syntax" } +ra_text_edit = { path = "../ra_text_edit" } + +[dev-dependencies] +test_utils = { path = "../test_utils" } +proptest = "0.8.7" diff --git a/crates/ra_ide_api_light/src/assists.rs b/crates/ra_ide_api_light/src/assists.rs new file mode 100644 index 000000000..83eabfc85 --- /dev/null +++ b/crates/ra_ide_api_light/src/assists.rs @@ -0,0 +1,209 @@ +//!
This module contains various "assists": suggestions for source code edits +//! which are likely to occur at a given cursor position. For example, if the +//! cursor is on the `,`, a possible assist is swapping the elements around the +//! comma. + +mod flip_comma; +mod add_derive; +mod add_impl; +mod introduce_variable; +mod change_visibility; +mod split_import; +mod replace_if_let_with_match; + +use ra_text_edit::{TextEdit, TextEditBuilder}; +use ra_syntax::{ + Direction, SyntaxNode, TextUnit, TextRange, SourceFile, AstNode, + algo::{find_leaf_at_offset, find_node_at_offset, find_covering_node, LeafAtOffset}, + ast::{self, AstToken}, +}; +use itertools::Itertools; + +pub use self::{ + flip_comma::flip_comma, + add_derive::add_derive, + add_impl::add_impl, + introduce_variable::introduce_variable, + change_visibility::change_visibility, + split_import::split_import, + replace_if_let_with_match::replace_if_let_with_match, +}; + +/// Return all the assists applicable at the given position. +pub fn assists(file: &SourceFile, range: TextRange) -> Vec { + let ctx = AssistCtx::new(file, range); + [ + flip_comma, + add_derive, + add_impl, + introduce_variable, + change_visibility, + split_import, + replace_if_let_with_match, + ] + .iter() + .filter_map(|&assist| ctx.clone().apply(assist)) + .collect() +} + +#[derive(Debug)] +pub struct LocalEdit { + pub label: String, + pub edit: TextEdit, + pub cursor_position: Option, +} + +fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> { + node.siblings(direction) + .skip(1) + .find(|node| !node.kind().is_trivia()) +} + +/// `AssistCtx` allows to apply an assist or check if it could be applied. +/// +/// Assists use a somewhat overengineered approach, given the current needs. The +/// assists workflow consists of two phases. In the first phase, a user asks for +/// the list of available assists. In the second phase, the user picks a +/// particular assist and it gets applied.
+/// +/// There are two peculiarities here: +/// +/// * first, we ideally avoid computing more things than necessary to answer +/// "is assist applicable" in the first phase. +/// * second, when we are applying assist, we don't have a guarantee that there +/// weren't any changes between the point when user asked for assists and when +/// they applied a particular assist. So, when applying assist, we need to do +/// all the checks from scratch. +/// +/// To avoid repeating the same code twice for both "check" and "apply" +/// functions, we use an approach reminiscent of that of Django's function based +/// views dealing with forms. Each assist receives a runtime parameter, +/// `should_compute_edit`. It first checks if an edit is applicable (potentially +/// computing info required to compute the actual edit). If it is applicable, +/// and `should_compute_edit` is `true`, it then computes the actual edit. +/// +/// So, to implement the original assists workflow, we can first apply each edit +/// with `should_compute_edit = false`, and then applying the selected edit +/// again, with `should_compute_edit = true` this time.
+/// +/// Note, however, that we don't actually use such two-phase logic at the +/// moment, because the LSP API is pretty awkward in this place, and it's much +/// easier to just compute the edit eagarly :-) +#[derive(Debug, Clone)] +pub struct AssistCtx<'a> { + source_file: &'a SourceFile, + range: TextRange, + should_compute_edit: bool, +} + +#[derive(Debug)] +pub enum Assist { + Applicable, + Edit(LocalEdit), +} + +#[derive(Default)] +struct AssistBuilder { + edit: TextEditBuilder, + cursor_position: Option, +} + +impl<'a> AssistCtx<'a> { + pub fn new(source_file: &'a SourceFile, range: TextRange) -> AssistCtx { + AssistCtx { + source_file, + range, + should_compute_edit: false, + } + } + + pub fn apply(mut self, assist: fn(AssistCtx) -> Option) -> Option { + self.should_compute_edit = true; + match assist(self) { + None => None, + Some(Assist::Edit(e)) => Some(e), + Some(Assist::Applicable) => unreachable!(), + } + } + + pub fn check(mut self, assist: fn(AssistCtx) -> Option) -> bool { + self.should_compute_edit = false; + match assist(self) { + None => false, + Some(Assist::Edit(_)) => unreachable!(), + Some(Assist::Applicable) => true, + } + } + + fn build(self, label: impl Into, f: impl FnOnce(&mut AssistBuilder)) -> Option { + if !self.should_compute_edit { + return Some(Assist::Applicable); + } + let mut edit = AssistBuilder::default(); + f(&mut edit); + Some(Assist::Edit(LocalEdit { + label: label.into(), + edit: edit.edit.finish(), + cursor_position: edit.cursor_position, + })) + } + + pub(crate) fn leaf_at_offset(&self) -> LeafAtOffset<&'a SyntaxNode> { + find_leaf_at_offset(self.source_file.syntax(), self.range.start()) + } + pub(crate) fn node_at_offset(&self) -> Option<&'a N> { + find_node_at_offset(self.source_file.syntax(), self.range.start()) + } + pub(crate) fn covering_node(&self) -> &'a SyntaxNode { + find_covering_node(self.source_file.syntax(), self.range) + } +} + +impl AssistBuilder { + fn replace(&mut self, range: TextRange, replace_with: 
impl Into) { + self.edit.replace(range, replace_with.into()) + } + fn replace_node_and_indent(&mut self, node: &SyntaxNode, replace_with: impl Into) { + let mut replace_with = replace_with.into(); + if let Some(indent) = calc_indent(node) { + replace_with = reindent(&replace_with, indent) + } + self.replace(node.range(), replace_with) + } + #[allow(unused)] + fn delete(&mut self, range: TextRange) { + self.edit.delete(range) + } + fn insert(&mut self, offset: TextUnit, text: impl Into) { + self.edit.insert(offset, text.into()) + } + fn set_cursor(&mut self, offset: TextUnit) { + self.cursor_position = Some(offset) + } +} + +fn calc_indent(node: &SyntaxNode) -> Option<&str> { + let prev = node.prev_sibling()?; + let ws_text = ast::Whitespace::cast(prev)?.text(); + ws_text.rfind('\n').map(|pos| &ws_text[pos + 1..]) +} + +fn reindent(text: &str, indent: &str) -> String { + let indent = format!("\n{}", indent); + text.lines().intersperse(&indent).collect() +} + +#[cfg(test)] +fn check_assist(assist: fn(AssistCtx) -> Option, before: &str, after: &str) { + crate::test_utils::check_action(before, after, |file, off| { + let range = TextRange::offset_len(off, 0.into()); + AssistCtx::new(file, range).apply(assist) + }) +} + +#[cfg(test)] +fn check_assist_range(assist: fn(AssistCtx) -> Option, before: &str, after: &str) { + crate::test_utils::check_action_range(before, after, |file, range| { + AssistCtx::new(file, range).apply(assist) + }) +} diff --git a/crates/ra_ide_api_light/src/assists/add_derive.rs b/crates/ra_ide_api_light/src/assists/add_derive.rs new file mode 100644 index 000000000..6e964d011 --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/add_derive.rs @@ -0,0 +1,84 @@ +use ra_syntax::{ + ast::{self, AstNode, AttrsOwner}, + SyntaxKind::{WHITESPACE, COMMENT}, + TextUnit, +}; + +use crate::assists::{AssistCtx, Assist}; + +pub fn add_derive(ctx: AssistCtx) -> Option { + let nominal = ctx.node_at_offset::()?; + let node_start = 
derive_insertion_offset(nominal)?; + ctx.build("add `#[derive]`", |edit| { + let derive_attr = nominal + .attrs() + .filter_map(|x| x.as_call()) + .filter(|(name, _arg)| name == "derive") + .map(|(_name, arg)| arg) + .next(); + let offset = match derive_attr { + None => { + edit.insert(node_start, "#[derive()]\n"); + node_start + TextUnit::of_str("#[derive(") + } + Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'), + }; + edit.set_cursor(offset) + }) +} + +// Insert `derive` after doc comments. +fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option { + let non_ws_child = nominal + .syntax() + .children() + .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; + Some(non_ws_child.range().start()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::assists::check_assist; + + #[test] + fn add_derive_new() { + check_assist( + add_derive, + "struct Foo { a: i32, <|>}", + "#[derive(<|>)]\nstruct Foo { a: i32, }", + ); + check_assist( + add_derive, + "struct Foo { <|> a: i32, }", + "#[derive(<|>)]\nstruct Foo { a: i32, }", + ); + } + + #[test] + fn add_derive_existing() { + check_assist( + add_derive, + "#[derive(Clone)]\nstruct Foo { a: i32<|>, }", + "#[derive(Clone<|>)]\nstruct Foo { a: i32, }", + ); + } + + #[test] + fn add_derive_new_with_doc_comment() { + check_assist( + add_derive, + " +/// `Foo` is a pretty important struct. +/// It does stuff. +struct Foo { a: i32<|>, } + ", + " +/// `Foo` is a pretty important struct. +/// It does stuff. 
+#[derive(<|>)] +struct Foo { a: i32, } + ", + ); + } +} diff --git a/crates/ra_ide_api_light/src/assists/add_impl.rs b/crates/ra_ide_api_light/src/assists/add_impl.rs new file mode 100644 index 000000000..2eda7cae2 --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/add_impl.rs @@ -0,0 +1,66 @@ +use join_to_string::join; +use ra_syntax::{ + ast::{self, AstNode, AstToken, NameOwner, TypeParamsOwner}, + TextUnit, +}; + +use crate::assists::{AssistCtx, Assist}; + +pub fn add_impl(ctx: AssistCtx) -> Option { + let nominal = ctx.node_at_offset::()?; + let name = nominal.name()?; + ctx.build("add impl", |edit| { + let type_params = nominal.type_param_list(); + let start_offset = nominal.syntax().range().end(); + let mut buf = String::new(); + buf.push_str("\n\nimpl"); + if let Some(type_params) = type_params { + type_params.syntax().text().push_to(&mut buf); + } + buf.push_str(" "); + buf.push_str(name.text().as_str()); + if let Some(type_params) = type_params { + let lifetime_params = type_params + .lifetime_params() + .filter_map(|it| it.lifetime()) + .map(|it| it.text()); + let type_params = type_params + .type_params() + .filter_map(|it| it.name()) + .map(|it| it.text()); + join(lifetime_params.chain(type_params)) + .surround_with("<", ">") + .to_buf(&mut buf); + } + buf.push_str(" {\n"); + edit.set_cursor(start_offset + TextUnit::of_str(&buf)); + buf.push_str("\n}"); + edit.insert(start_offset, buf); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::assists::check_assist; + + #[test] + fn test_add_impl() { + check_assist( + add_impl, + "struct Foo {<|>}\n", + "struct Foo {}\n\nimpl Foo {\n<|>\n}\n", + ); + check_assist( + add_impl, + "struct Foo {<|>}", + "struct Foo {}\n\nimpl Foo {\n<|>\n}", + ); + check_assist( + add_impl, + "struct Foo<'a, T: Foo<'a>> {<|>}", + "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n<|>\n}", + ); + } + +} diff --git a/crates/ra_ide_api_light/src/assists/change_visibility.rs 
b/crates/ra_ide_api_light/src/assists/change_visibility.rs new file mode 100644 index 000000000..89729e2c2 --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/change_visibility.rs @@ -0,0 +1,116 @@ +use ra_syntax::{ + AstNode, + ast::{self, VisibilityOwner, NameOwner}, + SyntaxKind::{VISIBILITY, FN_KW, MOD_KW, STRUCT_KW, ENUM_KW, TRAIT_KW, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT}, +}; + +use crate::assists::{AssistCtx, Assist}; + +pub fn change_visibility(ctx: AssistCtx) -> Option { + if let Some(vis) = ctx.node_at_offset::() { + return change_vis(ctx, vis); + } + add_vis(ctx) +} + +fn add_vis(ctx: AssistCtx) -> Option { + let item_keyword = ctx.leaf_at_offset().find(|leaf| match leaf.kind() { + FN_KW | MOD_KW | STRUCT_KW | ENUM_KW | TRAIT_KW => true, + _ => false, + }); + + let offset = if let Some(keyword) = item_keyword { + let parent = keyword.parent()?; + let def_kws = vec![FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF]; + // Parent is not a definition, can't add visibility + if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) { + return None; + } + // Already have visibility, do nothing + if parent.children().any(|child| child.kind() == VISIBILITY) { + return None; + } + parent.range().start() + } else { + let ident = ctx.leaf_at_offset().find(|leaf| leaf.kind() == IDENT)?; + let field = ident.ancestors().find_map(ast::NamedFieldDef::cast)?; + if field.name()?.syntax().range() != ident.range() && field.visibility().is_some() { + return None; + } + field.syntax().range().start() + }; + + ctx.build("make pub(crate)", |edit| { + edit.insert(offset, "pub(crate) "); + edit.set_cursor(offset); + }) +} + +fn change_vis(ctx: AssistCtx, vis: &ast::Visibility) -> Option { + if vis.syntax().text() != "pub" { + return None; + } + ctx.build("chage to pub(crate)", |edit| { + edit.replace(vis.syntax().range(), "pub(crate)"); + edit.set_cursor(vis.syntax().range().start()); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use 
crate::assists::check_assist; + + #[test] + fn change_visibility_adds_pub_crate_to_items() { + check_assist( + change_visibility, + "<|>fn foo() {}", + "<|>pub(crate) fn foo() {}", + ); + check_assist( + change_visibility, + "f<|>n foo() {}", + "<|>pub(crate) fn foo() {}", + ); + check_assist( + change_visibility, + "<|>struct Foo {}", + "<|>pub(crate) struct Foo {}", + ); + check_assist( + change_visibility, + "<|>mod foo {}", + "<|>pub(crate) mod foo {}", + ); + check_assist( + change_visibility, + "<|>trait Foo {}", + "<|>pub(crate) trait Foo {}", + ); + check_assist(change_visibility, "m<|>od {}", "<|>pub(crate) mod {}"); + check_assist( + change_visibility, + "unsafe f<|>n foo() {}", + "<|>pub(crate) unsafe fn foo() {}", + ); + } + + #[test] + fn change_visibility_works_with_struct_fields() { + check_assist( + change_visibility, + "struct S { <|>field: u32 }", + "struct S { <|>pub(crate) field: u32 }", + ) + } + + #[test] + fn change_visibility_pub_to_pub_crate() { + check_assist( + change_visibility, + "<|>pub fn foo() {}", + "<|>pub(crate) fn foo() {}", + ) + } +} diff --git a/crates/ra_ide_api_light/src/assists/flip_comma.rs b/crates/ra_ide_api_light/src/assists/flip_comma.rs new file mode 100644 index 000000000..a343413cc --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/flip_comma.rs @@ -0,0 +1,31 @@ +use ra_syntax::{ + Direction, + SyntaxKind::COMMA, +}; + +use crate::assists::{non_trivia_sibling, AssistCtx, Assist}; + +pub fn flip_comma(ctx: AssistCtx) -> Option { + let comma = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COMMA)?; + let prev = non_trivia_sibling(comma, Direction::Prev)?; + let next = non_trivia_sibling(comma, Direction::Next)?; + ctx.build("flip comma", |edit| { + edit.replace(prev.range(), next.text()); + edit.replace(next.range(), prev.text()); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::assists::check_assist; + + #[test] + fn flip_comma_works_for_function_parameters() { + check_assist( + flip_comma, + 
"fn foo(x: i32,<|> y: Result<(), ()>) {}", + "fn foo(y: Result<(), ()>,<|> x: i32) {}", + ) + } +} diff --git a/crates/ra_ide_api_light/src/assists/introduce_variable.rs b/crates/ra_ide_api_light/src/assists/introduce_variable.rs new file mode 100644 index 000000000..523ec7034 --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/introduce_variable.rs @@ -0,0 +1,144 @@ +use ra_syntax::{ + ast::{self, AstNode}, + SyntaxKind::WHITESPACE, + SyntaxNode, TextUnit, +}; + +use crate::assists::{AssistCtx, Assist}; + +pub fn introduce_variable<'a>(ctx: AssistCtx) -> Option { + let node = ctx.covering_node(); + let expr = node.ancestors().filter_map(ast::Expr::cast).next()?; + + let anchor_stmt = anchor_stmt(expr)?; + let indent = anchor_stmt.prev_sibling()?; + if indent.kind() != WHITESPACE { + return None; + } + ctx.build("introduce variable", move |edit| { + let mut buf = String::new(); + + buf.push_str("let var_name = "); + expr.syntax().text().push_to(&mut buf); + let is_full_stmt = if let Some(expr_stmt) = ast::ExprStmt::cast(anchor_stmt) { + Some(expr.syntax()) == expr_stmt.expr().map(|e| e.syntax()) + } else { + false + }; + if is_full_stmt { + edit.replace(expr.syntax().range(), buf); + } else { + buf.push_str(";"); + indent.text().push_to(&mut buf); + edit.replace(expr.syntax().range(), "var_name".to_string()); + edit.insert(anchor_stmt.range().start(), buf); + } + edit.set_cursor(anchor_stmt.range().start() + TextUnit::of_str("let ")); + }) +} + +/// Statement or last in the block expression, which will follow +/// the freshly introduced var. 
+fn anchor_stmt(expr: &ast::Expr) -> Option<&SyntaxNode> { + expr.syntax().ancestors().find(|&node| { + if ast::Stmt::cast(node).is_some() { + return true; + } + if let Some(expr) = node + .parent() + .and_then(ast::Block::cast) + .and_then(|it| it.expr()) + { + if expr.syntax() == node { + return true; + } + } + false + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::assists::check_assist_range; + + #[test] + fn test_introduce_var_simple() { + check_assist_range( + introduce_variable, + " +fn foo() { + foo(<|>1 + 1<|>); +}", + " +fn foo() { + let <|>var_name = 1 + 1; + foo(var_name); +}", + ); + } + + #[test] + fn test_introduce_var_expr_stmt() { + check_assist_range( + introduce_variable, + " +fn foo() { + <|>1 + 1<|>; +}", + " +fn foo() { + let <|>var_name = 1 + 1; +}", + ); + } + + #[test] + fn test_introduce_var_part_of_expr_stmt() { + check_assist_range( + introduce_variable, + " +fn foo() { + <|>1<|> + 1; +}", + " +fn foo() { + let <|>var_name = 1; + var_name + 1; +}", + ); + } + + #[test] + fn test_introduce_var_last_expr() { + check_assist_range( + introduce_variable, + " +fn foo() { + bar(<|>1 + 1<|>) +}", + " +fn foo() { + let <|>var_name = 1 + 1; + bar(var_name) +}", + ); + } + + #[test] + fn test_introduce_var_last_full_expr() { + check_assist_range( + introduce_variable, + " +fn foo() { + <|>bar(1 + 1)<|> +}", + " +fn foo() { + let <|>var_name = bar(1 + 1); + var_name +}", + ); + } + +} diff --git a/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs b/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs new file mode 100644 index 000000000..30c371480 --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/replace_if_let_with_match.rs @@ -0,0 +1,92 @@ +use ra_syntax::{ + AstNode, SyntaxKind::{L_CURLY, R_CURLY, WHITESPACE}, + ast, +}; + +use crate::assists::{AssistCtx, Assist}; + +pub fn replace_if_let_with_match(ctx: AssistCtx) -> Option { + let if_expr: &ast::IfExpr = ctx.node_at_offset()?; + let cond = 
if_expr.condition()?; + let pat = cond.pat()?; + let expr = cond.expr()?; + let then_block = if_expr.then_branch()?; + let else_block = if_expr.else_branch()?; + + ctx.build("replace with match", |edit| { + let match_expr = build_match_expr(expr, pat, then_block, else_block); + edit.replace_node_and_indent(if_expr.syntax(), match_expr); + edit.set_cursor(if_expr.syntax().range().start()) + }) +} + +fn build_match_expr( + expr: &ast::Expr, + pat1: &ast::Pat, + arm1: &ast::Block, + arm2: &ast::Block, +) -> String { + let mut buf = String::new(); + buf.push_str(&format!("match {} {{\n", expr.syntax().text())); + buf.push_str(&format!( + " {} => {}\n", + pat1.syntax().text(), + format_arm(arm1) + )); + buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); + buf.push_str("}"); + buf +} + +fn format_arm(block: &ast::Block) -> String { + match extract_expression(block) { + None => block.syntax().text().to_string(), + Some(e) => format!("{},", e.syntax().text()), + } +} + +fn extract_expression(block: &ast::Block) -> Option<&ast::Expr> { + let expr = block.expr()?; + let non_trivial_children = block.syntax().children().filter(|it| { + !(it == &expr.syntax() + || it.kind() == L_CURLY + || it.kind() == R_CURLY + || it.kind() == WHITESPACE) + }); + if non_trivial_children.count() > 0 { + return None; + } + Some(expr) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::assists::check_assist; + + #[test] + fn test_replace_if_let_with_match_unwraps_simple_expressions() { + check_assist( + replace_if_let_with_match, + " +impl VariantData { + pub fn is_struct(&self) -> bool { + if <|>let VariantData::Struct(..) = *self { + true + } else { + false + } + } +} ", + " +impl VariantData { + pub fn is_struct(&self) -> bool { + <|>match *self { + VariantData::Struct(..) 
=> true, + _ => false, + } + } +} ", + ) + } +} diff --git a/crates/ra_ide_api_light/src/assists/split_import.rs b/crates/ra_ide_api_light/src/assists/split_import.rs new file mode 100644 index 000000000..e4015f07d --- /dev/null +++ b/crates/ra_ide_api_light/src/assists/split_import.rs @@ -0,0 +1,56 @@ +use ra_syntax::{ + TextUnit, AstNode, SyntaxKind::COLONCOLON, + ast, + algo::generate, +}; + +use crate::assists::{AssistCtx, Assist}; + +pub fn split_import(ctx: AssistCtx) -> Option { + let colon_colon = ctx + .leaf_at_offset() + .find(|leaf| leaf.kind() == COLONCOLON)?; + let path = colon_colon.parent().and_then(ast::Path::cast)?; + let top_path = generate(Some(path), |it| it.parent_path()).last()?; + + let use_tree = top_path.syntax().ancestors().find_map(ast::UseTree::cast); + if use_tree.is_none() { + return None; + } + + let l_curly = colon_colon.range().end(); + let r_curly = match top_path.syntax().parent().and_then(ast::UseTree::cast) { + Some(tree) => tree.syntax().range().end(), + None => top_path.syntax().range().end(), + }; + + ctx.build("split import", |edit| { + edit.insert(l_curly, "{"); + edit.insert(r_curly, "}"); + edit.set_cursor(l_curly + TextUnit::of_str("{")); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::assists::check_assist; + + #[test] + fn test_split_import() { + check_assist( + split_import, + "use crate::<|>db::RootDatabase;", + "use crate::{<|>db::RootDatabase};", + ) + } + + #[test] + fn split_import_works_with_trees() { + check_assist( + split_import, + "use algo:<|>:visitor::{Visitor, visit}", + "use algo::{<|>visitor::{Visitor, visit}}", + ) + } +} diff --git a/crates/ra_ide_api_light/src/diagnostics.rs b/crates/ra_ide_api_light/src/diagnostics.rs new file mode 100644 index 000000000..2b695dfdf --- /dev/null +++ b/crates/ra_ide_api_light/src/diagnostics.rs @@ -0,0 +1,266 @@ +use itertools::Itertools; + +use ra_syntax::{ + Location, SourceFile, SyntaxKind, TextRange, SyntaxNode, + ast::{self, AstNode}, + +}; 
+use ra_text_edit::{TextEdit, TextEditBuilder}; + +use crate::{Diagnostic, LocalEdit, Severity}; + +pub fn diagnostics(file: &SourceFile) -> Vec { + fn location_to_range(location: Location) -> TextRange { + match location { + Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), + Location::Range(range) => range, + } + } + + let mut errors: Vec = file + .errors() + .into_iter() + .map(|err| Diagnostic { + range: location_to_range(err.location()), + msg: format!("Syntax Error: {}", err), + severity: Severity::Error, + fix: None, + }) + .collect(); + + for node in file.syntax().descendants() { + check_unnecessary_braces_in_use_statement(&mut errors, node); + check_struct_shorthand_initialization(&mut errors, node); + } + + errors +} + +fn check_unnecessary_braces_in_use_statement( + acc: &mut Vec, + node: &SyntaxNode, +) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(node)?; + if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { + let range = use_tree_list.syntax().range(); + let edit = + text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree) + .unwrap_or_else(|| { + let to_replace = single_use_tree.syntax().text().to_string(); + let mut edit_builder = TextEditBuilder::default(); + edit_builder.delete(range); + edit_builder.insert(range.start(), to_replace); + edit_builder.finish() + }); + + acc.push(Diagnostic { + range, + msg: format!("Unnecessary braces in use statement"), + severity: Severity::WeakWarning, + fix: Some(LocalEdit { + label: "Remove unnecessary braces".to_string(), + edit, + cursor_position: None, + }), + }); + } + + Some(()) +} + +fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( + single_use_tree: &ast::UseTree, +) -> Option { + let use_tree_list_node = single_use_tree.syntax().parent()?; + if single_use_tree + .path()? + .segment()? + .syntax() + .first_child()? 
+ .kind() + == SyntaxKind::SELF_KW + { + let start = use_tree_list_node.prev_sibling()?.range().start(); + let end = use_tree_list_node.range().end(); + let range = TextRange::from_to(start, end); + let mut edit_builder = TextEditBuilder::default(); + edit_builder.delete(range); + return Some(edit_builder.finish()); + } + None +} + +fn check_struct_shorthand_initialization( + acc: &mut Vec, + node: &SyntaxNode, +) -> Option<()> { + let struct_lit = ast::StructLit::cast(node)?; + let named_field_list = struct_lit.named_field_list()?; + for named_field in named_field_list.fields() { + if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) { + let field_name = name_ref.syntax().text().to_string(); + let field_expr = expr.syntax().text().to_string(); + if field_name == field_expr { + let mut edit_builder = TextEditBuilder::default(); + edit_builder.delete(named_field.syntax().range()); + edit_builder.insert(named_field.syntax().range().start(), field_name); + let edit = edit_builder.finish(); + + acc.push(Diagnostic { + range: named_field.syntax().range(), + msg: format!("Shorthand struct initialization"), + severity: Severity::WeakWarning, + fix: Some(LocalEdit { + label: "use struct shorthand initialization".to_string(), + edit, + cursor_position: None, + }), + }); + } + } + } + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::test_utils::assert_eq_text; + + use super::*; + + type DiagnosticChecker = fn(&mut Vec, &SyntaxNode) -> Option<()>; + + fn check_not_applicable(code: &str, func: DiagnosticChecker) { + let file = SourceFile::parse(code); + let mut diagnostics = Vec::new(); + for node in file.syntax().descendants() { + func(&mut diagnostics, node); + } + assert!(diagnostics.is_empty()); + } + + fn check_apply(before: &str, after: &str, func: DiagnosticChecker) { + let file = SourceFile::parse(before); + let mut diagnostics = Vec::new(); + for node in file.syntax().descendants() { + func(&mut diagnostics, node); + } + let 
diagnostic = diagnostics + .pop() + .unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); + let fix = diagnostic.fix.unwrap(); + let actual = fix.edit.apply(&before); + assert_eq_text!(after, &actual); + } + + #[test] + fn test_check_unnecessary_braces_in_use_statement() { + check_not_applicable( + " + use a; + use a::{c, d::e}; + ", + check_unnecessary_braces_in_use_statement, + ); + check_apply( + "use {b};", + "use b;", + check_unnecessary_braces_in_use_statement, + ); + check_apply( + "use a::{c};", + "use a::c;", + check_unnecessary_braces_in_use_statement, + ); + check_apply( + "use a::{self};", + "use a;", + check_unnecessary_braces_in_use_statement, + ); + check_apply( + "use a::{c, d::{e}};", + "use a::{c, d::e};", + check_unnecessary_braces_in_use_statement, + ); + } + + #[test] + fn test_check_struct_shorthand_initialization() { + check_not_applicable( + r#" + struct A { + a: &'static str + } + + fn main() { + A { + a: "hello" + } + } + "#, + check_struct_shorthand_initialization, + ); + + check_apply( + r#" +struct A { + a: &'static str +} + +fn main() { + let a = "haha"; + A { + a: a + } +} + "#, + r#" +struct A { + a: &'static str +} + +fn main() { + let a = "haha"; + A { + a + } +} + "#, + check_struct_shorthand_initialization, + ); + + check_apply( + r#" +struct A { + a: &'static str, + b: &'static str +} + +fn main() { + let a = "haha"; + let b = "bb"; + A { + a: a, + b + } +} + "#, + r#" +struct A { + a: &'static str, + b: &'static str +} + +fn main() { + let a = "haha"; + let b = "bb"; + A { + a, + b + } +} + "#, + check_struct_shorthand_initialization, + ); + } +} diff --git a/crates/ra_ide_api_light/src/extend_selection.rs b/crates/ra_ide_api_light/src/extend_selection.rs new file mode 100644 index 000000000..08cae5a51 --- /dev/null +++ b/crates/ra_ide_api_light/src/extend_selection.rs @@ -0,0 +1,281 @@ +use ra_syntax::{ + Direction, SyntaxNode, TextRange, TextUnit, + algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset}, + 
SyntaxKind::*, +}; + +pub fn extend_selection(root: &SyntaxNode, range: TextRange) -> Option { + let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING]; + if range.is_empty() { + let offset = range.start(); + let mut leaves = find_leaf_at_offset(root, offset); + if leaves.clone().all(|it| it.kind() == WHITESPACE) { + return Some(extend_ws(root, leaves.next()?, offset)); + } + let leaf_range = match leaves { + LeafAtOffset::None => return None, + LeafAtOffset::Single(l) => { + if string_kinds.contains(&l.kind()) { + extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range()) + } else { + l.range() + } + } + LeafAtOffset::Between(l, r) => pick_best(l, r).range(), + }; + return Some(leaf_range); + }; + let node = find_covering_node(root, range); + if string_kinds.contains(&node.kind()) && range == node.range() { + if let Some(range) = extend_comments(node) { + return Some(range); + } + } + + match node.ancestors().skip_while(|n| n.range() == range).next() { + None => None, + Some(parent) => Some(parent.range()), + } +} + +fn extend_single_word_in_comment_or_string( + leaf: &SyntaxNode, + offset: TextUnit, +) -> Option { + let text: &str = leaf.leaf_text()?; + let cursor_position: u32 = (offset - leaf.range().start()).into(); + + let (before, after) = text.split_at(cursor_position as usize); + + fn non_word_char(c: char) -> bool { + !(c.is_alphanumeric() || c == '_') + } + + let start_idx = before.rfind(non_word_char)? 
as u32; + let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32; + + let from: TextUnit = (start_idx + 1).into(); + let to: TextUnit = (cursor_position + end_idx).into(); + + let range = TextRange::from_to(from, to); + if range.is_empty() { + None + } else { + Some(range + leaf.range().start()) + } +} + +fn extend_ws(root: &SyntaxNode, ws: &SyntaxNode, offset: TextUnit) -> TextRange { + let ws_text = ws.leaf_text().unwrap(); + let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start(); + let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start(); + let ws_suffix = &ws_text.as_str()[suffix]; + let ws_prefix = &ws_text.as_str()[prefix]; + if ws_text.contains('\n') && !ws_suffix.contains('\n') { + if let Some(node) = ws.next_sibling() { + let start = match ws_prefix.rfind('\n') { + Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32), + None => node.range().start(), + }; + let end = if root.text().char_at(node.range().end()) == Some('\n') { + node.range().end() + TextUnit::of_char('\n') + } else { + node.range().end() + }; + return TextRange::from_to(start, end); + } + } + ws.range() +} + +fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode { + return if priority(r) > priority(l) { r } else { l }; + fn priority(n: &SyntaxNode) -> usize { + match n.kind() { + WHITESPACE => 0, + IDENT | SELF_KW | SUPER_KW | CRATE_KW | LIFETIME => 2, + _ => 1, + } + } +} + +fn extend_comments(node: &SyntaxNode) -> Option { + let prev = adj_comments(node, Direction::Prev); + let next = adj_comments(node, Direction::Next); + if prev != next { + Some(TextRange::from_to(prev.range().start(), next.range().end())) + } else { + None + } +} + +fn adj_comments(node: &SyntaxNode, dir: Direction) -> &SyntaxNode { + let mut res = node; + for node in node.siblings(dir) { + match node.kind() { + COMMENT => res = node, + WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (), + _ => break, + } 
+ } + res +} + +#[cfg(test)] +mod tests { + use ra_syntax::{SourceFile, AstNode}; + use test_utils::extract_offset; + + use super::*; + + fn do_check(before: &str, afters: &[&str]) { + let (cursor, before) = extract_offset(before); + let file = SourceFile::parse(&before); + let mut range = TextRange::offset_len(cursor, 0.into()); + for &after in afters { + range = extend_selection(file.syntax(), range).unwrap(); + let actual = &before[range]; + assert_eq!(after, actual); + } + } + + #[test] + fn test_extend_selection_arith() { + do_check(r#"fn foo() { <|>1 + 1 }"#, &["1", "1 + 1", "{ 1 + 1 }"]); + } + + #[test] + fn test_extend_selection_start_of_the_lind() { + do_check( + r#" +impl S { +<|> fn foo() { + + } +}"#, + &[" fn foo() {\n\n }\n"], + ); + } + + #[test] + fn test_extend_selection_doc_comments() { + do_check( + r#" +struct A; + +/// bla +/// bla +struct B { + <|> +} + "#, + &[ + "\n \n", + "{\n \n}", + "/// bla\n/// bla\nstruct B {\n \n}", + ], + ) + } + + #[test] + fn test_extend_selection_comments() { + do_check( + r#" +fn bar(){} + +// fn foo() { +// 1 + <|>1 +// } + +// fn foo(){} + "#, + &["1", "// 1 + 1", "// fn foo() {\n// 1 + 1\n// }"], + ); + + do_check( + r#" +// #[derive(Debug, Clone, Copy, PartialEq, Eq)] +// pub enum Direction { +// <|> Next, +// Prev +// } +"#, + &[ + "// Next,", + "// #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n// pub enum Direction {\n// Next,\n// Prev\n// }", + ], + ); + + do_check( + r#" +/* +foo +_bar1<|>*/ + "#, + &["_bar1", "/*\nfoo\n_bar1*/"], + ); + + do_check( + r#" +//!<|>foo_2 bar + "#, + &["foo_2", "//!foo_2 bar"], + ); + + do_check( + r#" +/<|>/foo bar + "#, + &["//foo bar"], + ); + } + + #[test] + fn test_extend_selection_prefer_idents() { + do_check( + r#" +fn main() { foo<|>+bar;} + "#, + &["foo", "foo+bar"], + ); + do_check( + r#" +fn main() { foo+<|>bar;} + "#, + &["bar", "foo+bar"], + ); + } + + #[test] + fn test_extend_selection_prefer_lifetimes() { + do_check(r#"fn foo<<|>'a>() {}"#, &["'a", "<'a>"]); + 
do_check(r#"fn foo<'a<|>>() {}"#, &["'a", "<'a>"]); + } + + #[test] + fn test_extend_selection_select_first_word() { + do_check(r#"// foo bar b<|>az quxx"#, &["baz", "// foo bar baz quxx"]); + do_check( + r#" +impl S { + fn foo() { + // hel<|>lo world + } +} + "#, + &["hello", "// hello world"], + ); + } + + #[test] + fn test_extend_selection_string() { + do_check( + r#" +fn bar(){} + +" fn f<|>oo() {" + "#, + &["foo", "\" fn foo() {\""], + ); + } +} diff --git a/crates/ra_ide_api_light/src/folding_ranges.rs b/crates/ra_ide_api_light/src/folding_ranges.rs new file mode 100644 index 000000000..6f3106889 --- /dev/null +++ b/crates/ra_ide_api_light/src/folding_ranges.rs @@ -0,0 +1,297 @@ +use rustc_hash::FxHashSet; + +use ra_syntax::{ + ast, AstNode, Direction, SourceFile, SyntaxNode, TextRange, + SyntaxKind::{self, *}, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum FoldKind { + Comment, + Imports, + Block, +} + +#[derive(Debug)] +pub struct Fold { + pub range: TextRange, + pub kind: FoldKind, +} + +pub fn folding_ranges(file: &SourceFile) -> Vec { + let mut res = vec![]; + let mut visited_comments = FxHashSet::default(); + let mut visited_imports = FxHashSet::default(); + + for node in file.syntax().descendants() { + // Fold items that span multiple lines + if let Some(kind) = fold_kind(node.kind()) { + if has_newline(node) { + res.push(Fold { + range: node.range(), + kind, + }); + } + } + + // Fold groups of comments + if node.kind() == COMMENT && !visited_comments.contains(&node) { + if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) { + res.push(Fold { + range, + kind: FoldKind::Comment, + }) + } + } + + // Fold groups of imports + if node.kind() == USE_ITEM && !visited_imports.contains(&node) { + if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { + res.push(Fold { + range, + kind: FoldKind::Imports, + }) + } + } + } + + res +} + +fn fold_kind(kind: SyntaxKind) -> Option { + match kind { + COMMENT => 
Some(FoldKind::Comment), + USE_ITEM => Some(FoldKind::Imports), + NAMED_FIELD_DEF_LIST | FIELD_PAT_LIST | ITEM_LIST | EXTERN_ITEM_LIST | USE_TREE_LIST + | BLOCK | ENUM_VARIANT_LIST => Some(FoldKind::Block), + _ => None, + } +} + +fn has_newline(node: &SyntaxNode) -> bool { + for descendant in node.descendants() { + if let Some(ws) = ast::Whitespace::cast(descendant) { + if ws.has_newlines() { + return true; + } + } else if let Some(comment) = ast::Comment::cast(descendant) { + if comment.has_newlines() { + return true; + } + } + } + + false +} + +fn contiguous_range_for_group<'a>( + first: &'a SyntaxNode, + visited: &mut FxHashSet<&'a SyntaxNode>, +) -> Option { + visited.insert(first); + + let mut last = first; + for node in first.siblings(Direction::Next) { + if let Some(ws) = ast::Whitespace::cast(node) { + // There is a blank line, which means that the group ends here + if ws.count_newlines_lazy().take(2).count() == 2 { + break; + } + + // Ignore whitespace without blank lines + continue; + } + + // Stop if we find a node that doesn't belong to the group + if node.kind() != first.kind() { + break; + } + + visited.insert(node); + last = node; + } + + if first != last { + Some(TextRange::from_to( + first.range().start(), + last.range().end(), + )) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +fn contiguous_range_for_comment<'a>( + first: &'a SyntaxNode, + visited: &mut FxHashSet<&'a SyntaxNode>, +) -> Option { + visited.insert(first); + + // Only fold comments of the same flavor + let group_flavor = ast::Comment::cast(first)?.flavor(); + + let mut last = first; + for node in first.siblings(Direction::Next) { + if let Some(ws) = ast::Whitespace::cast(node) { + // There is a blank line, which means the group ends here + if ws.count_newlines_lazy().take(2).count() == 2 { + break; + } + + // Ignore whitespace without blank lines + continue; + } + + match ast::Comment::cast(node) { + Some(next_comment) if 
next_comment.flavor() == group_flavor => { + visited.insert(node); + last = node; + } + // The comment group ends because either: + // * An element of a different kind was reached + // * A comment of a different flavor was reached + _ => break, + } + } + + if first != last { + Some(TextRange::from_to( + first.range().start(), + last.range().end(), + )) + } else { + // The group consists of only one element, therefore it cannot be folded + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + use test_utils::extract_ranges; + + fn do_check(text: &str, fold_kinds: &[FoldKind]) { + let (ranges, text) = extract_ranges(text, "fold"); + let file = SourceFile::parse(&text); + let folds = folding_ranges(&file); + + assert_eq!( + folds.len(), + ranges.len(), + "The amount of folds is different than the expected amount" + ); + assert_eq!( + folds.len(), + fold_kinds.len(), + "The amount of fold kinds is different than the expected amount" + ); + for ((fold, range), fold_kind) in folds + .into_iter() + .zip(ranges.into_iter()) + .zip(fold_kinds.into_iter()) + { + assert_eq!(fold.range.start(), range.start()); + assert_eq!(fold.range.end(), range.end()); + assert_eq!(&fold.kind, fold_kind); + } + } + + #[test] + fn test_fold_comments() { + let text = r#" +// Hello +// this is a multiline +// comment +// + +// But this is not + +fn main() { + // We should + // also + // fold + // this one. + //! But this one is different + //! 
because it has another flavor + /* As does this + multiline comment */ +}"#; + + let fold_kinds = &[ + FoldKind::Comment, + FoldKind::Block, + FoldKind::Comment, + FoldKind::Comment, + FoldKind::Comment, + ]; + do_check(text, fold_kinds); + } + + #[test] + fn test_fold_imports() { + let text = r#" +use std::{ + str, + vec, + io as iop +}; + +fn main() { +}"#; + + let folds = &[FoldKind::Imports, FoldKind::Block, FoldKind::Block]; + do_check(text, folds); + } + + #[test] + fn test_fold_import_groups() { + let text = r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::HashMap; +// Some random comment +use std::collections::VecDeque; + +fn main() { +}"#; + + let folds = &[FoldKind::Imports, FoldKind::Imports, FoldKind::Block]; + do_check(text, folds); + } + + #[test] + fn test_fold_import_and_groups() { + let text = r#" +use std::str; +use std::vec; +use std::io as iop; + +use std::mem; +use std::f64; + +use std::collections::{ + HashMap, + VecDeque, +}; +// Some random comment + +fn main() { +}"#; + + let folds = &[ + FoldKind::Imports, + FoldKind::Imports, + FoldKind::Imports, + FoldKind::Block, + FoldKind::Block, + ]; + do_check(text, folds); + } + +} diff --git a/crates/ra_ide_api_light/src/lib.rs b/crates/ra_ide_api_light/src/lib.rs new file mode 100644 index 000000000..5a6af19b7 --- /dev/null +++ b/crates/ra_ide_api_light/src/lib.rs @@ -0,0 +1,168 @@ +pub mod assists; +mod extend_selection; +mod folding_ranges; +mod line_index; +mod line_index_utils; +mod structure; +#[cfg(test)] +mod test_utils; +mod typing; +mod diagnostics; + +pub use self::{ + assists::LocalEdit, + extend_selection::extend_selection, + folding_ranges::{folding_ranges, Fold, FoldKind}, + line_index::{LineCol, LineIndex}, + line_index_utils::translate_offset_with_edit, + structure::{file_structure, StructureNode}, + typing::{join_lines, on_enter, on_dot_typed, on_eq_typed}, + diagnostics::diagnostics +}; +use 
ra_text_edit::TextEditBuilder; +use ra_syntax::{ + SourceFile, SyntaxNode, TextRange, TextUnit, Direction, + SyntaxKind::{self, *}, + ast::{self, AstNode}, + algo::find_leaf_at_offset, +}; +use rustc_hash::FxHashSet; + +#[derive(Debug)] +pub struct HighlightedRange { + pub range: TextRange, + pub tag: &'static str, +} + +#[derive(Debug, Copy, Clone)] +pub enum Severity { + Error, + WeakWarning, +} + +#[derive(Debug)] +pub struct Diagnostic { + pub range: TextRange, + pub msg: String, + pub severity: Severity, + pub fix: Option, +} + +pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { + const BRACES: &[SyntaxKind] = &[ + L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE, + ]; + let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset) + .filter_map(|node| { + let idx = BRACES.iter().position(|&brace| brace == node.kind())?; + Some((node, idx)) + }) + .next()?; + let parent = brace_node.parent()?; + let matching_kind = BRACES[brace_idx ^ 1]; + let matching_node = parent + .children() + .find(|node| node.kind() == matching_kind)?; + Some(matching_node.range().start()) +} + +pub fn highlight(root: &SyntaxNode) -> Vec { + // Visited nodes to handle highlighting priorities + let mut highlighted = FxHashSet::default(); + let mut res = Vec::new(); + for node in root.descendants() { + if highlighted.contains(&node) { + continue; + } + let tag = match node.kind() { + COMMENT => "comment", + STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string", + ATTR => "attribute", + NAME_REF => "text", + NAME => "function", + INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", + LIFETIME => "parameter", + k if k.is_keyword() => "keyword", + _ => { + if let Some(macro_call) = ast::MacroCall::cast(node) { + if let Some(path) = macro_call.path() { + if let Some(segment) = path.segment() { + if let Some(name_ref) = segment.name_ref() { + highlighted.insert(name_ref.syntax()); + let range_start = 
name_ref.syntax().range().start(); + let mut range_end = name_ref.syntax().range().end(); + for sibling in path.syntax().siblings(Direction::Next) { + match sibling.kind() { + EXCL | IDENT => range_end = sibling.range().end(), + _ => (), + } + } + res.push(HighlightedRange { + range: TextRange::from_to(range_start, range_end), + tag: "macro", + }) + } + } + } + } + continue; + } + }; + res.push(HighlightedRange { + range: node.range(), + tag, + }) + } + res +} + +pub fn syntax_tree(file: &SourceFile) -> String { + ::ra_syntax::utils::dump_tree(file.syntax()) +} + +#[cfg(test)] +mod tests { + use ra_syntax::AstNode; + + use crate::test_utils::{add_cursor, assert_eq_dbg, assert_eq_text, extract_offset}; + + use super::*; + + #[test] + fn test_highlighting() { + let file = SourceFile::parse( + r#" +// comment +fn main() {} + println!("Hello, {}!", 92); +"#, + ); + let hls = highlight(file.syntax()); + assert_eq_dbg( + r#"[HighlightedRange { range: [1; 11), tag: "comment" }, + HighlightedRange { range: [12; 14), tag: "keyword" }, + HighlightedRange { range: [15; 19), tag: "function" }, + HighlightedRange { range: [29; 37), tag: "macro" }, + HighlightedRange { range: [38; 50), tag: "string" }, + HighlightedRange { range: [52; 54), tag: "literal" }]"#, + &hls, + ); + } + + #[test] + fn test_matching_brace() { + fn do_check(before: &str, after: &str) { + let (pos, before) = extract_offset(before); + let file = SourceFile::parse(&before); + let new_pos = match matching_brace(&file, pos) { + None => pos, + Some(pos) => pos, + }; + let actual = add_cursor(&before, new_pos); + assert_eq_text!(after, &actual); + } + + do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); + } + +} diff --git a/crates/ra_ide_api_light/src/line_index.rs b/crates/ra_ide_api_light/src/line_index.rs new file mode 100644 index 000000000..898fee7e0 --- /dev/null +++ b/crates/ra_ide_api_light/src/line_index.rs @@ -0,0 +1,399 @@ +use crate::TextUnit; +use rustc_hash::FxHashMap; +use 
superslice::Ext; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct LineIndex { + pub(crate) newlines: Vec, + pub(crate) utf16_lines: FxHashMap>, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct LineCol { + pub line: u32, + pub col_utf16: u32, +} + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub(crate) struct Utf16Char { + pub(crate) start: TextUnit, + pub(crate) end: TextUnit, +} + +impl Utf16Char { + fn len(&self) -> TextUnit { + self.end - self.start + } +} + +impl LineIndex { + pub fn new(text: &str) -> LineIndex { + let mut utf16_lines = FxHashMap::default(); + let mut utf16_chars = Vec::new(); + + let mut newlines = vec![0.into()]; + let mut curr_row = 0.into(); + let mut curr_col = 0.into(); + let mut line = 0; + for c in text.chars() { + curr_row += TextUnit::of_char(c); + if c == '\n' { + newlines.push(curr_row); + + // Save any utf-16 characters seen in the previous line + if utf16_chars.len() > 0 { + utf16_lines.insert(line, utf16_chars); + utf16_chars = Vec::new(); + } + + // Prepare for processing the next line + curr_col = 0.into(); + line += 1; + continue; + } + + let char_len = TextUnit::of_char(c); + if char_len.to_usize() > 1 { + utf16_chars.push(Utf16Char { + start: curr_col, + end: curr_col + char_len, + }); + } + + curr_col += char_len; + } + + // Save any utf-16 characters seen in the last line + if utf16_chars.len() > 0 { + utf16_lines.insert(line, utf16_chars); + } + + LineIndex { + newlines, + utf16_lines, + } + } + + pub fn line_col(&self, offset: TextUnit) -> LineCol { + let line = self.newlines.upper_bound(&offset) - 1; + let line_start_offset = self.newlines[line]; + let col = offset - line_start_offset; + + LineCol { + line: line as u32, + col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32, + } + } + + pub fn offset(&self, line_col: LineCol) -> TextUnit { + //TODO: return Result + let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); + self.newlines[line_col.line as usize] + col + } + + 
fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize { + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + let mut correction = TextUnit::from_usize(0); + for c in utf16_chars { + if col >= c.end { + correction += c.len() - TextUnit::from_usize(1); + } else { + // From here on, all utf16 characters come *after* the character we are mapping, + // so we don't need to take them into account + break; + } + } + + col -= correction; + } + + col.to_usize() + } + + fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit { + let mut col: TextUnit = col.into(); + if let Some(utf16_chars) = self.utf16_lines.get(&line) { + for c in utf16_chars { + if col >= c.start { + col += c.len() - TextUnit::from_usize(1); + } else { + // From here on, all utf16 characters come *after* the character we are mapping, + // so we don't need to take them into account + break; + } + } + } + + col + } +} + +#[cfg(test)] +/// Simple reference implementation to use in proptests +pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol { + let mut res = LineCol { + line: 0, + col_utf16: 0, + }; + for (i, c) in text.char_indices() { + if i + c.len_utf8() > offset.to_usize() { + // if it's an invalid offset, inside a multibyte char + // return as if it was at the start of the char + break; + } + if c == '\n' { + res.line += 1; + res.col_utf16 = 0; + } else { + res.col_utf16 += 1; + } + } + res +} + +#[cfg(test)] +mod test_line_index { + use super::*; + use proptest::{prelude::*, proptest, proptest_helper}; + use ra_text_edit::test_utils::{arb_text, arb_offset}; + + #[test] + fn test_line_index() { + let text = "hello\nworld"; + let index = LineIndex::new(text); + assert_eq!( + index.line_col(0.into()), + LineCol { + line: 0, + col_utf16: 0 + } + ); + assert_eq!( + index.line_col(1.into()), + LineCol { + line: 0, + col_utf16: 1 + } + ); + assert_eq!( + index.line_col(5.into()), + LineCol { + line: 0, + col_utf16: 5 + } + ); + assert_eq!( + index.line_col(6.into()), + 
LineCol { + line: 1, + col_utf16: 0 + } + ); + assert_eq!( + index.line_col(7.into()), + LineCol { + line: 1, + col_utf16: 1 + } + ); + assert_eq!( + index.line_col(8.into()), + LineCol { + line: 1, + col_utf16: 2 + } + ); + assert_eq!( + index.line_col(10.into()), + LineCol { + line: 1, + col_utf16: 4 + } + ); + assert_eq!( + index.line_col(11.into()), + LineCol { + line: 1, + col_utf16: 5 + } + ); + assert_eq!( + index.line_col(12.into()), + LineCol { + line: 1, + col_utf16: 6 + } + ); + + let text = "\nhello\nworld"; + let index = LineIndex::new(text); + assert_eq!( + index.line_col(0.into()), + LineCol { + line: 0, + col_utf16: 0 + } + ); + assert_eq!( + index.line_col(1.into()), + LineCol { + line: 1, + col_utf16: 0 + } + ); + assert_eq!( + index.line_col(2.into()), + LineCol { + line: 1, + col_utf16: 1 + } + ); + assert_eq!( + index.line_col(6.into()), + LineCol { + line: 1, + col_utf16: 5 + } + ); + assert_eq!( + index.line_col(7.into()), + LineCol { + line: 2, + col_utf16: 0 + } + ); + } + + fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> { + arb_text() + .prop_flat_map(|text| (arb_offset(&text), Just(text))) + .boxed() + } + + fn to_line_col(text: &str, offset: TextUnit) -> LineCol { + let mut res = LineCol { + line: 0, + col_utf16: 0, + }; + for (i, c) in text.char_indices() { + if i + c.len_utf8() > offset.to_usize() { + // if it's an invalid offset, inside a multibyte char + // return as if it was at the start of the char + break; + } + if c == '\n' { + res.line += 1; + res.col_utf16 = 0; + } else { + res.col_utf16 += 1; + } + } + res + } + + proptest! 
{ + #[test] + fn test_line_index_proptest((offset, text) in arb_text_with_offset()) { + let expected = to_line_col(&text, offset); + let line_index = LineIndex::new(&text); + let actual = line_index.line_col(offset); + + assert_eq!(actual, expected); + } + } +} + +#[cfg(test)] +mod test_utf8_utf16_conv { + use super::*; + + #[test] + fn test_char_len() { + assert_eq!('メ'.len_utf8(), 3); + assert_eq!('メ'.len_utf16(), 1); + } + + #[test] + fn test_empty_index() { + let col_index = LineIndex::new( + " +const C: char = 'x'; +", + ); + assert_eq!(col_index.utf16_lines.len(), 0); + } + + #[test] + fn test_single_char() { + let col_index = LineIndex::new( + " +const C: char = 'メ'; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 1); + assert_eq!( + col_index.utf16_lines[&1][0], + Utf16Char { + start: 17.into(), + end: 20.into() + } + ); + + // UTF-8 to UTF-16, no changes + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20); + + // UTF-16 to UTF-8, no changes + assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15)); + + // UTF-16 to UTF-8 + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21)); + } + + #[test] + fn test_string() { + let col_index = LineIndex::new( + " +const C: char = \"メ メ\"; +", + ); + + assert_eq!(col_index.utf16_lines.len(), 1); + assert_eq!(col_index.utf16_lines[&1].len(), 2); + assert_eq!( + col_index.utf16_lines[&1][0], + Utf16Char { + start: 17.into(), + end: 20.into() + } + ); + assert_eq!( + col_index.utf16_lines[&1][1], + Utf16Char { + start: 21.into(), + end: 24.into() + } + ); + + // UTF-8 to UTF-16 + assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15); + + assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19); + assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21); + + assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15); + + // UTF-16 to UTF-8 + 
assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15)); + + assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20)); + assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23)); + + assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15)); + } + +} diff --git a/crates/ra_ide_api_light/src/line_index_utils.rs b/crates/ra_ide_api_light/src/line_index_utils.rs new file mode 100644 index 000000000..ec3269bbb --- /dev/null +++ b/crates/ra_ide_api_light/src/line_index_utils.rs @@ -0,0 +1,363 @@ +use ra_text_edit::{AtomTextEdit, TextEdit}; +use ra_syntax::{TextUnit, TextRange}; +use crate::{LineIndex, LineCol, line_index::Utf16Char}; + +#[derive(Debug, Clone)] +enum Step { + Newline(TextUnit), + Utf16Char(TextRange), +} + +#[derive(Debug)] +struct LineIndexStepIter<'a> { + line_index: &'a LineIndex, + next_newline_idx: usize, + utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>, +} + +impl<'a> LineIndexStepIter<'a> { + fn from(line_index: &LineIndex) -> LineIndexStepIter { + let mut x = LineIndexStepIter { + line_index, + next_newline_idx: 0, + utf16_chars: None, + }; + // skip first newline since it's not real + x.next(); + x + } +} + +impl<'a> Iterator for LineIndexStepIter<'a> { + type Item = Step; + fn next(&mut self) -> Option { + self.utf16_chars + .as_mut() + .and_then(|(newline, x)| { + let x = x.next()?; + Some(Step::Utf16Char(TextRange::from_to( + *newline + x.start, + *newline + x.end, + ))) + }) + .or_else(|| { + let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?; + self.utf16_chars = self + .line_index + .utf16_lines + .get(&(self.next_newline_idx as u32)) + .map(|x| (next_newline, x.iter())); + self.next_newline_idx += 1; + Some(Step::Newline(next_newline)) + }) + } +} + +#[derive(Debug)] +struct OffsetStepIter<'a> { + text: &'a str, + offset: TextUnit, +} + +impl<'a> Iterator for OffsetStepIter<'a> { + type Item = Step; + fn next(&mut self) -> Option { + 
let (next, next_offset) = self + .text + .char_indices() + .filter_map(|(i, c)| { + if c == '\n' { + let next_offset = self.offset + TextUnit::from_usize(i + 1); + let next = Step::Newline(next_offset); + Some((next, next_offset)) + } else { + let char_len = TextUnit::of_char(c); + if char_len.to_usize() > 1 { + let start = self.offset + TextUnit::from_usize(i); + let end = start + char_len; + let next = Step::Utf16Char(TextRange::from_to(start, end)); + let next_offset = end; + Some((next, next_offset)) + } else { + None + } + } + }) + .next()?; + let next_idx = (next_offset - self.offset).to_usize(); + self.text = &self.text[next_idx..]; + self.offset = next_offset; + Some(next) + } +} + +#[derive(Debug)] +enum NextSteps<'a> { + Use, + ReplaceMany(OffsetStepIter<'a>), + AddMany(OffsetStepIter<'a>), +} + +#[derive(Debug)] +struct TranslatedEdit<'a> { + delete: TextRange, + insert: &'a str, + diff: i64, +} + +struct Edits<'a> { + edits: &'a [AtomTextEdit], + current: Option>, + acc_diff: i64, +} + +impl<'a> Edits<'a> { + fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> { + let mut x = Edits { + edits: text_edit.as_atoms(), + current: None, + acc_diff: 0, + }; + x.advance_edit(); + x + } + fn advance_edit(&mut self) { + self.acc_diff += self.current.as_ref().map_or(0, |x| x.diff); + match self.edits.split_first() { + Some((next, rest)) => { + let delete = self.translate_range(next.delete); + let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64; + self.current = Some(TranslatedEdit { + delete, + insert: &next.insert, + diff, + }); + self.edits = rest; + } + None => { + self.current = None; + } + } + } + + fn next_inserted_steps(&mut self) -> Option> { + let cur = self.current.as_ref()?; + let res = Some(OffsetStepIter { + offset: cur.delete.start(), + text: &cur.insert, + }); + self.advance_edit(); + res + } + + fn next_steps(&mut self, step: &Step) -> NextSteps { + let step_pos = match step { + &Step::Newline(n) => n, + 
&Step::Utf16Char(r) => r.end(), + }; + let res = match &mut self.current { + Some(edit) => { + if step_pos <= edit.delete.start() { + NextSteps::Use + } else if step_pos <= edit.delete.end() { + let iter = OffsetStepIter { + offset: edit.delete.start(), + text: &edit.insert, + }; + // empty slice to avoid returning steps again + edit.insert = &edit.insert[edit.insert.len()..]; + NextSteps::ReplaceMany(iter) + } else { + let iter = OffsetStepIter { + offset: edit.delete.start(), + text: &edit.insert, + }; + // empty slice to avoid returning steps again + edit.insert = &edit.insert[edit.insert.len()..]; + self.advance_edit(); + NextSteps::AddMany(iter) + } + } + None => NextSteps::Use, + }; + res + } + + fn translate_range(&self, range: TextRange) -> TextRange { + if self.acc_diff == 0 { + range + } else { + let start = self.translate(range.start()); + let end = self.translate(range.end()); + TextRange::from_to(start, end) + } + } + + fn translate(&self, x: TextUnit) -> TextUnit { + if self.acc_diff == 0 { + x + } else { + TextUnit::from((x.to_usize() as i64 + self.acc_diff) as u32) + } + } + + fn translate_step(&self, x: &Step) -> Step { + if self.acc_diff == 0 { + x.clone() + } else { + match x { + &Step::Newline(n) => Step::Newline(self.translate(n)), + &Step::Utf16Char(r) => Step::Utf16Char(self.translate_range(r)), + } + } + } +} + +#[derive(Debug)] +struct RunningLineCol { + line: u32, + last_newline: TextUnit, + col_adjust: TextUnit, +} + +impl RunningLineCol { + fn new() -> RunningLineCol { + RunningLineCol { + line: 0, + last_newline: TextUnit::from(0), + col_adjust: TextUnit::from(0), + } + } + + fn to_line_col(&self, offset: TextUnit) -> LineCol { + LineCol { + line: self.line, + col_utf16: ((offset - self.last_newline) - self.col_adjust).into(), + } + } + + fn add_line(&mut self, newline: TextUnit) { + self.line += 1; + self.last_newline = newline; + self.col_adjust = TextUnit::from(0); + } + + fn adjust_col(&mut self, range: &TextRange) { + 
self.col_adjust += range.len() - TextUnit::from(1); + } +} + +pub fn translate_offset_with_edit( + line_index: &LineIndex, + offset: TextUnit, + text_edit: &TextEdit, +) -> LineCol { + let mut state = Edits::from_text_edit(&text_edit); + + let mut res = RunningLineCol::new(); + + macro_rules! test_step { + ($x:ident) => { + match &$x { + Step::Newline(n) => { + if offset < *n { + return res.to_line_col(offset); + } else { + res.add_line(*n); + } + } + Step::Utf16Char(x) => { + if offset < x.end() { + // if the offset is inside a multibyte char it's invalid + // clamp it to the start of the char + let clamp = offset.min(x.start()); + return res.to_line_col(clamp); + } else { + res.adjust_col(x); + } + } + } + }; + } + + for orig_step in LineIndexStepIter::from(line_index) { + loop { + let translated_step = state.translate_step(&orig_step); + match state.next_steps(&translated_step) { + NextSteps::Use => { + test_step!(translated_step); + break; + } + NextSteps::ReplaceMany(ns) => { + for n in ns { + test_step!(n); + } + break; + } + NextSteps::AddMany(ns) => { + for n in ns { + test_step!(n); + } + } + } + } + } + + loop { + match state.next_inserted_steps() { + None => break, + Some(ns) => { + for n in ns { + test_step!(n); + } + } + } + } + + res.to_line_col(offset) +} + +#[cfg(test)] +mod test { + use super::*; + use proptest::{prelude::*, proptest, proptest_helper}; + use crate::line_index; + use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit}; + use ra_text_edit::TextEdit; + + #[derive(Debug)] + struct ArbTextWithEditAndOffset { + text: String, + edit: TextEdit, + edited_text: String, + offset: TextUnit, + } + + fn arb_text_with_edit_and_offset() -> BoxedStrategy { + arb_text_with_edit() + .prop_flat_map(|x| { + let edited_text = x.edit.apply(&x.text); + let arb_offset = arb_offset(&edited_text); + (Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| { + ArbTextWithEditAndOffset { + text: x.text, + edit: x.edit, + edited_text, 
+ offset, + } + }) + }) + .boxed() + } + + proptest! { + #[test] + fn test_translate_offset_with_edit(x in arb_text_with_edit_and_offset()) { + let expected = line_index::to_line_col(&x.edited_text, x.offset); + let line_index = LineIndex::new(&x.text); + let actual = translate_offset_with_edit(&line_index, x.offset, &x.edit); + + assert_eq!(actual, expected); + } + } +} diff --git a/crates/ra_ide_api_light/src/structure.rs b/crates/ra_ide_api_light/src/structure.rs new file mode 100644 index 000000000..8bd57555f --- /dev/null +++ b/crates/ra_ide_api_light/src/structure.rs @@ -0,0 +1,129 @@ +use crate::TextRange; + +use ra_syntax::{ + algo::visit::{visitor, Visitor}, + ast::{self, NameOwner}, + AstNode, SourceFile, SyntaxKind, SyntaxNode, WalkEvent, +}; + +#[derive(Debug, Clone)] +pub struct StructureNode { + pub parent: Option, + pub label: String, + pub navigation_range: TextRange, + pub node_range: TextRange, + pub kind: SyntaxKind, +} + +pub fn file_structure(file: &SourceFile) -> Vec { + let mut res = Vec::new(); + let mut stack = Vec::new(); + + for event in file.syntax().preorder() { + match event { + WalkEvent::Enter(node) => { + if let Some(mut symbol) = structure_node(node) { + symbol.parent = stack.last().map(|&n| n); + stack.push(res.len()); + res.push(symbol); + } + } + WalkEvent::Leave(node) => { + if structure_node(node).is_some() { + stack.pop().unwrap(); + } + } + } + } + res +} + +fn structure_node(node: &SyntaxNode) -> Option { + fn decl(node: &N) -> Option { + let name = node.name()?; + Some(StructureNode { + parent: None, + label: name.text().to_string(), + navigation_range: name.syntax().range(), + node_range: node.syntax().range(), + kind: node.syntax().kind(), + }) + } + + visitor() + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(|im: &ast::ImplBlock| { + let target_type = im.target_type()?; + let target_trait = 
im.target_trait(); + let label = match target_trait { + None => format!("impl {}", target_type.syntax().text()), + Some(t) => format!( + "impl {} for {}", + t.syntax().text(), + target_type.syntax().text(), + ), + }; + + let node = StructureNode { + parent: None, + label, + navigation_range: target_type.syntax().range(), + node_range: im.syntax().range(), + kind: im.syntax().kind(), + }; + Some(node) + }) + .accept(node)? +} + +#[cfg(test)] +mod tests { + use super::*; + use test_utils::assert_eq_dbg; + + #[test] + fn test_file_structure() { + let file = SourceFile::parse( + r#" +struct Foo { + x: i32 +} + +mod m { + fn bar() {} +} + +enum E { X, Y(i32) } +type T = (); +static S: i32 = 92; +const C: i32 = 92; + +impl E {} + +impl fmt::Debug for E {} +"#, + ); + let structure = file_structure(&file); + assert_eq_dbg( + r#"[StructureNode { parent: None, label: "Foo", navigation_range: [8; 11), node_range: [1; 26), kind: STRUCT_DEF }, + StructureNode { parent: Some(0), label: "x", navigation_range: [18; 19), node_range: [18; 24), kind: NAMED_FIELD_DEF }, + StructureNode { parent: None, label: "m", navigation_range: [32; 33), node_range: [28; 53), kind: MODULE }, + StructureNode { parent: Some(2), label: "bar", navigation_range: [43; 46), node_range: [40; 51), kind: FN_DEF }, + StructureNode { parent: None, label: "E", navigation_range: [60; 61), node_range: [55; 75), kind: ENUM_DEF }, + StructureNode { parent: None, label: "T", navigation_range: [81; 82), node_range: [76; 88), kind: TYPE_DEF }, + StructureNode { parent: None, label: "S", navigation_range: [96; 97), node_range: [89; 108), kind: STATIC_DEF }, + StructureNode { parent: None, label: "C", navigation_range: [115; 116), node_range: [109; 127), kind: CONST_DEF }, + StructureNode { parent: None, label: "impl E", navigation_range: [134; 135), node_range: [129; 138), kind: IMPL_BLOCK }, + StructureNode { parent: None, label: "impl fmt::Debug for E", navigation_range: [160; 161), node_range: [140; 164), kind: 
IMPL_BLOCK }]"#, + &structure, + ) + } +} diff --git a/crates/ra_ide_api_light/src/test_utils.rs b/crates/ra_ide_api_light/src/test_utils.rs new file mode 100644 index 000000000..dc2470aa3 --- /dev/null +++ b/crates/ra_ide_api_light/src/test_utils.rs @@ -0,0 +1,41 @@ +use ra_syntax::{SourceFile, TextRange, TextUnit}; + +use crate::LocalEdit; +pub use test_utils::*; + +pub fn check_action Option>( + before: &str, + after: &str, + f: F, +) { + let (before_cursor_pos, before) = extract_offset(before); + let file = SourceFile::parse(&before); + let result = f(&file, before_cursor_pos).expect("code action is not applicable"); + let actual = result.edit.apply(&before); + let actual_cursor_pos = match result.cursor_position { + None => result + .edit + .apply_to_offset(before_cursor_pos) + .expect("cursor position is affected by the edit"), + Some(off) => off, + }; + let actual = add_cursor(&actual, actual_cursor_pos); + assert_eq_text!(after, &actual); +} + +pub fn check_action_range Option>( + before: &str, + after: &str, + f: F, +) { + let (range, before) = extract_range(before); + let file = SourceFile::parse(&before); + let result = f(&file, range).expect("code action is not applicable"); + let actual = result.edit.apply(&before); + let actual_cursor_pos = match result.cursor_position { + None => result.edit.apply_to_offset(range.start()).unwrap(), + Some(off) => off, + }; + let actual = add_cursor(&actual, actual_cursor_pos); + assert_eq_text!(after, &actual); +} diff --git a/crates/ra_ide_api_light/src/typing.rs b/crates/ra_ide_api_light/src/typing.rs new file mode 100644 index 000000000..d8177f245 --- /dev/null +++ b/crates/ra_ide_api_light/src/typing.rs @@ -0,0 +1,826 @@ +use std::mem; + +use itertools::Itertools; +use ra_syntax::{ + algo::{find_node_at_offset, find_covering_node, find_leaf_at_offset, LeafAtOffset}, + ast, + AstNode, Direction, SourceFile, SyntaxKind, + SyntaxKind::*, + SyntaxNode, TextRange, TextUnit, +}; + +use crate::{LocalEdit, 
TextEditBuilder}; + +pub fn join_lines(file: &SourceFile, range: TextRange) -> LocalEdit { + let range = if range.is_empty() { + let syntax = file.syntax(); + let text = syntax.text().slice(range.start()..); + let pos = match text.find('\n') { + None => { + return LocalEdit { + label: "join lines".to_string(), + edit: TextEditBuilder::default().finish(), + cursor_position: None, + }; + } + Some(pos) => pos, + }; + TextRange::offset_len(range.start() + pos, TextUnit::of_char('\n')) + } else { + range + }; + + let node = find_covering_node(file.syntax(), range); + let mut edit = TextEditBuilder::default(); + for node in node.descendants() { + let text = match node.leaf_text() { + Some(text) => text, + None => continue, + }; + let range = match range.intersection(&node.range()) { + Some(range) => range, + None => continue, + } - node.range().start(); + for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { + let pos: TextUnit = (pos as u32).into(); + let off = node.range().start() + range.start() + pos; + if !edit.invalidates_offset(off) { + remove_newline(&mut edit, node, text.as_str(), off); + } + } + } + + LocalEdit { + label: "join lines".to_string(), + edit: edit.finish(), + cursor_position: None, + } +} + +pub fn on_enter(file: &SourceFile, offset: TextUnit) -> Option { + let comment = find_leaf_at_offset(file.syntax(), offset) + .left_biased() + .and_then(ast::Comment::cast)?; + + if let ast::CommentFlavor::Multiline = comment.flavor() { + return None; + } + + let prefix = comment.prefix(); + if offset < comment.syntax().range().start() + TextUnit::of_str(prefix) + TextUnit::from(1) { + return None; + } + + let indent = node_indent(file, comment.syntax())?; + let inserted = format!("\n{}{} ", indent, prefix); + let cursor_position = offset + TextUnit::of_str(&inserted); + let mut edit = TextEditBuilder::default(); + edit.insert(offset, inserted); + Some(LocalEdit { + label: "on enter".to_string(), + edit: edit.finish(), + 
cursor_position: Some(cursor_position), + }) +} + +fn node_indent<'a>(file: &'a SourceFile, node: &SyntaxNode) -> Option<&'a str> { + let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) { + LeafAtOffset::Between(l, r) => { + assert!(r == node); + l + } + LeafAtOffset::Single(n) => { + assert!(n == node); + return Some(""); + } + LeafAtOffset::None => unreachable!(), + }; + if ws.kind() != WHITESPACE { + return None; + } + let text = ws.leaf_text().unwrap(); + let pos = text.as_str().rfind('\n').map(|it| it + 1).unwrap_or(0); + Some(&text[pos..]) +} + +pub fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option { + let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), offset)?; + if let_stmt.has_semi() { + return None; + } + if let Some(expr) = let_stmt.initializer() { + let expr_range = expr.syntax().range(); + if expr_range.contains(offset) && offset != expr_range.start() { + return None; + } + if file + .syntax() + .text() + .slice(offset..expr_range.start()) + .contains('\n') + { + return None; + } + } else { + return None; + } + let offset = let_stmt.syntax().range().end(); + let mut edit = TextEditBuilder::default(); + edit.insert(offset, ";".to_string()); + Some(LocalEdit { + label: "add semicolon".to_string(), + edit: edit.finish(), + cursor_position: None, + }) +} + +pub fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option { + let before_dot_offset = offset - TextUnit::of_char('.'); + + let whitespace = find_leaf_at_offset(file.syntax(), before_dot_offset).left_biased()?; + + // find whitespace just left of the dot + ast::Whitespace::cast(whitespace)?; + + // make sure there is a method call + let method_call = whitespace + .siblings(Direction::Prev) + // first is whitespace + .skip(1) + .next()?; + + ast::MethodCallExpr::cast(method_call)?; + + // find how much the _method call is indented + let method_chain_indent = method_call + .parent()? + .siblings(Direction::Prev) + .skip(1) + .next()? 
+ .leaf_text() + .map(|x| last_line_indent_in_whitespace(x))?; + + let current_indent = TextUnit::of_str(last_line_indent_in_whitespace(whitespace.leaf_text()?)); + // TODO: indent is always 4 spaces now. A better heuristic could look on the previous line(s) + + let target_indent = TextUnit::of_str(method_chain_indent) + TextUnit::from_usize(4); + + let diff = target_indent - current_indent; + + let indent = "".repeat(diff.to_usize()); + + let cursor_position = offset + diff; + let mut edit = TextEditBuilder::default(); + edit.insert(before_dot_offset, indent); + Some(LocalEdit { + label: "indent dot".to_string(), + edit: edit.finish(), + cursor_position: Some(cursor_position), + }) +} + +/// Finds the last line in the whitespace +fn last_line_indent_in_whitespace(ws: &str) -> &str { + ws.split('\n').last().unwrap_or("") +} + +fn remove_newline( + edit: &mut TextEditBuilder, + node: &SyntaxNode, + node_text: &str, + offset: TextUnit, +) { + if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 { + // The node is either the first or the last in the file + let suff = &node_text[TextRange::from_to( + offset - node.range().start() + TextUnit::of_char('\n'), + TextUnit::of_str(node_text), + )]; + let spaces = suff.bytes().take_while(|&b| b == b' ').count(); + + edit.replace( + TextRange::offset_len(offset, ((spaces + 1) as u32).into()), + " ".to_string(), + ); + return; + } + + // Special case that turns something like: + // + // ``` + // my_function({<|> + // + // }) + // ``` + // + // into `my_function()` + if join_single_expr_block(edit, node).is_some() { + return; + } + // ditto for + // + // ``` + // use foo::{<|> + // bar + // }; + // ``` + if join_single_use_tree(edit, node).is_some() { + return; + } + + // The node is between two other nodes + let prev = node.prev_sibling().unwrap(); + let next = node.next_sibling().unwrap(); + if is_trailing_comma(prev.kind(), next.kind()) { + // Removes: trailing comma, newline (incl. 
surrounding whitespace) + edit.delete(TextRange::from_to(prev.range().start(), node.range().end())); + } else if prev.kind() == COMMA && next.kind() == R_CURLY { + // Removes: comma, newline (incl. surrounding whitespace) + let space = if let Some(left) = prev.prev_sibling() { + compute_ws(left, next) + } else { + " " + }; + edit.replace( + TextRange::from_to(prev.range().start(), node.range().end()), + space.to_string(), + ); + } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) { + // Removes: newline (incl. surrounding whitespace), start of the next comment + edit.delete(TextRange::from_to( + node.range().start(), + next.syntax().range().start() + TextUnit::of_str(next.prefix()), + )); + } else { + // Remove newline but add a computed amount of whitespace characters + edit.replace(node.range(), compute_ws(prev, next).to_string()); + } +} + +fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { + match (left, right) { + (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, + _ => false, + } +} + +fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { + let block = ast::Block::cast(node.parent()?)?; + let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; + let expr = single_expr(block)?; + edit.replace( + block_expr.syntax().range(), + expr.syntax().text().to_string(), + ); + Some(()) +} + +fn single_expr(block: &ast::Block) -> Option<&ast::Expr> { + let mut res = None; + for child in block.syntax().children() { + if let Some(expr) = ast::Expr::cast(child) { + if expr.syntax().text().contains('\n') { + return None; + } + if mem::replace(&mut res, Some(expr)).is_some() { + return None; + } + } else { + match child.kind() { + WHITESPACE | L_CURLY | R_CURLY => (), + _ => return None, + } + } + } + res +} + +fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { + let use_tree_list = ast::UseTreeList::cast(node.parent()?)?; + let (tree,) = 
use_tree_list.use_trees().collect_tuple()?; + edit.replace( + use_tree_list.syntax().range(), + tree.syntax().text().to_string(), + ); + Some(()) +} + +fn compute_ws(left: &SyntaxNode, right: &SyntaxNode) -> &'static str { + match left.kind() { + L_PAREN | L_BRACK => return "", + L_CURLY => { + if let USE_TREE = right.kind() { + return ""; + } + } + _ => (), + } + match right.kind() { + R_PAREN | R_BRACK => return "", + R_CURLY => { + if let USE_TREE = left.kind() { + return ""; + } + } + DOT => return "", + _ => (), + } + " " +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_utils::{ + add_cursor, assert_eq_text, check_action, extract_offset, extract_range, +}; + + fn check_join_lines(before: &str, after: &str) { + check_action(before, after, |file, offset| { + let range = TextRange::offset_len(offset, 0.into()); + let res = join_lines(file, range); + Some(res) + }) + } + + #[test] + fn test_join_lines_comma() { + check_join_lines( + r" +fn foo() { + <|>foo(1, + ) +} +", + r" +fn foo() { + <|>foo(1) +} +", + ); + } + + #[test] + fn test_join_lines_lambda_block() { + check_join_lines( + r" +pub fn reparse(&self, edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| { + self.full_reparse(edit) + }) +} +", + r" +pub fn reparse(&self, edit: &AtomTextEdit) -> File { + <|>self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit)) +} +", + ); + } + + #[test] + fn test_join_lines_block() { + check_join_lines( + r" +fn foo() { + foo(<|>{ + 92 + }) +}", + r" +fn foo() { + foo(<|>92) +}", + ); + } + + #[test] + fn test_join_lines_use_items_left() { + // No space after the '{' + check_join_lines( + r" +<|>use ra_syntax::{ + TextUnit, TextRange, +};", + r" +<|>use ra_syntax::{TextUnit, TextRange, +};", + ); + } + + #[test] + fn test_join_lines_use_items_right() { + // No space after the '}' + check_join_lines( + r" +use ra_syntax::{ +<|> TextUnit, TextRange +};", + r" +use ra_syntax::{ +<|> TextUnit, TextRange};", + 
); + } + + #[test] + fn test_join_lines_use_items_right_comma() { + // No space after the '}' + check_join_lines( + r" +use ra_syntax::{ +<|> TextUnit, TextRange, +};", + r" +use ra_syntax::{ +<|> TextUnit, TextRange};", + ); + } + + #[test] + fn test_join_lines_use_tree() { + check_join_lines( + r" +use ra_syntax::{ + algo::<|>{ + find_leaf_at_offset, + }, + ast, +};", + r" +use ra_syntax::{ + algo::<|>find_leaf_at_offset, + ast, +};", + ); + } + + #[test] + fn test_join_lines_normal_comments() { + check_join_lines( + r" +fn foo() { + // Hello<|> + // world! +} +", + r" +fn foo() { + // Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_doc_comments() { + check_join_lines( + r" +fn foo() { + /// Hello<|> + /// world! +} +", + r" +fn foo() { + /// Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_mod_comments() { + check_join_lines( + r" +fn foo() { + //! Hello<|> + //! world! +} +", + r" +fn foo() { + //! Hello<|> world! +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_1() { + check_join_lines( + r" +fn foo() { + // Hello<|> + /* world! */ +} +", + r" +fn foo() { + // Hello<|> world! */ +} +", + ); + } + + #[test] + fn test_join_lines_multiline_comments_2() { + check_join_lines( + r" +fn foo() { + // The<|> + /* quick + brown + fox! */ +} +", + r" +fn foo() { + // The<|> quick + brown + fox! 
*/ +} +", + ); + } + + fn check_join_lines_sel(before: &str, after: &str) { + let (sel, before) = extract_range(before); + let file = SourceFile::parse(&before); + let result = join_lines(&file, sel); + let actual = result.edit.apply(&before); + assert_eq_text!(after, &actual); + } + + #[test] + fn test_join_lines_selection_fn_args() { + check_join_lines_sel( + r" +fn foo() { + <|>foo(1, + 2, + 3, + <|>) +} + ", + r" +fn foo() { + foo(1, 2, 3) +} + ", + ); + } + + #[test] + fn test_join_lines_selection_struct() { + check_join_lines_sel( + r" +struct Foo <|>{ + f: u32, +}<|> + ", + r" +struct Foo { f: u32 } + ", + ); + } + + #[test] + fn test_join_lines_selection_dot_chain() { + check_join_lines_sel( + r" +fn foo() { + join(<|>type_params.type_params() + .filter_map(|it| it.name()) + .map(|it| it.text())<|>) +}", + r" +fn foo() { + join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text())) +}", + ); + } + + #[test] + fn test_join_lines_selection_lambda_block_body() { + check_join_lines_sel( + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| <|>{ + world.analysis().matching_brace(&file, offset).unwrap_or(offset) + }<|>) + .collect(); +}", + r" +pub fn handle_find_matching_brace() { + params.offsets + .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset)) + .collect(); +}", + ); + } + + #[test] + fn test_on_eq_typed() { + fn do_check(before: &str, after: &str) { + let (offset, before) = extract_offset(before); + let file = SourceFile::parse(&before); + let result = on_eq_typed(&file, offset).unwrap(); + let actual = result.edit.apply(&before); + assert_eq_text!(after, &actual); + } + + // do_check(r" + // fn foo() { + // let foo =<|> + // } + // ", r" + // fn foo() { + // let foo =; + // } + // "); + do_check( + r" +fn foo() { + let foo =<|> 1 + 1 +} +", + r" +fn foo() { + let foo = 1 + 1; +} +", + ); + // do_check(r" + // fn foo() { + // let foo =<|> + // let bar = 1; + // } + // ", r" + // fn 
foo() { + // let foo =; + // let bar = 1; + // } + // "); + } + + #[test] + fn test_on_dot_typed() { + fn do_check(before: &str, after: &str) { + let (offset, before) = extract_offset(before); + let file = SourceFile::parse(&before); + if let Some(result) = on_eq_typed(&file, offset) { + let actual = result.edit.apply(&before); + assert_eq_text!(after, &actual); + }; + } + // indent if continuing chain call + do_check( + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .<|> + } +", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + . + } +", + ); + + // do not indent if already indented + do_check( + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .<|> + } +", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + . + } +", + ); + + // indent if the previous line is already indented + do_check( + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + .<|> + } +", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + . + } +", + ); + + // don't indent if indent matches previous line + do_check( + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + .<|> + } +", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + .first() + . + } +", + ); + + // don't indent if there is no method call on previous line + do_check( + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + .<|> + } +", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + . 
+ } +", + ); + + // indent to match previous expr + do_check( + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) +.<|> + } +", + r" + pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable> { + self.child_impl(db, name) + . + } +", + ); + } + + #[test] + fn test_on_enter() { + fn apply_on_enter(before: &str) -> Option { + let (offset, before) = extract_offset(before); + let file = SourceFile::parse(&before); + let result = on_enter(&file, offset)?; + let actual = result.edit.apply(&before); + let actual = add_cursor(&actual, result.cursor_position.unwrap()); + Some(actual) + } + + fn do_check(before: &str, after: &str) { + let actual = apply_on_enter(before).unwrap(); + assert_eq_text!(after, &actual); + } + + fn do_check_noop(text: &str) { + assert!(apply_on_enter(text).is_none()) + } + + do_check( + r" +/// Some docs<|> +fn foo() { +} +", + r" +/// Some docs +/// <|> +fn foo() { +} +", + ); + do_check( + r" +impl S { + /// Some<|> docs. + fn foo() {} +} +", + r" +impl S { + /// Some + /// <|> docs. + fn foo() {} +} +", + ); + do_check_noop(r"<|>//! 
docz"); + } +} -- cgit v1.2.3 From fa3c9ce3921b6a3f67222bf4f9b4efdf4f11c2a5 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 8 Jan 2019 22:30:32 +0300 Subject: fix usages after rename --- crates/ra_analysis/Cargo.toml | 2 +- crates/ra_analysis/src/extend_selection.rs | 4 ++-- crates/ra_analysis/src/imp.rs | 4 ++-- crates/ra_analysis/src/lib.rs | 21 ++++++++++++--------- crates/ra_analysis/src/syntax_highlighting.rs | 7 +++---- crates/ra_cli/Cargo.toml | 2 +- crates/ra_cli/src/main.rs | 2 +- crates/ra_ide_api_light/Cargo.toml | 2 +- crates/ra_ide_api_light/src/lib.rs | 5 +++++ 9 files changed, 28 insertions(+), 21 deletions(-) (limited to 'crates') diff --git a/crates/ra_analysis/Cargo.toml b/crates/ra_analysis/Cargo.toml index 11c78ced8..3c0814279 100644 --- a/crates/ra_analysis/Cargo.toml +++ b/crates/ra_analysis/Cargo.toml @@ -16,7 +16,7 @@ parking_lot = "0.7.0" unicase = "2.2.0" ra_syntax = { path = "../ra_syntax" } -ra_editor = { path = "../ra_editor" } +ra_ide_api_light = { path = "../ra_ide_api_light" } ra_text_edit = { path = "../ra_text_edit" } ra_db = { path = "../ra_db" } hir = { path = "../ra_hir", package = "ra_hir" } diff --git a/crates/ra_analysis/src/extend_selection.rs b/crates/ra_analysis/src/extend_selection.rs index 3b130f966..c3c809c9f 100644 --- a/crates/ra_analysis/src/extend_selection.rs +++ b/crates/ra_analysis/src/extend_selection.rs @@ -14,7 +14,7 @@ pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRang if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { return range; } - ra_editor::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) + ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) } fn extend_selection_in_macro( @@ -25,7 +25,7 @@ fn extend_selection_in_macro( let macro_call = find_macro_call(source_file.syntax(), frange.range)?; let (off, exp) = hir::MacroDef::ast_expand(macro_call)?; let dst_range = 
exp.map_range_forward(frange.range - off)?; - let dst_range = ra_editor::extend_selection(&exp.syntax(), dst_range)?; + let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?; let src_range = exp.map_range_back(dst_range)? + off; Some(src_range) } diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs index 2b9963b3c..7c60ab7d6 100644 --- a/crates/ra_analysis/src/imp.rs +++ b/crates/ra_analysis/src/imp.rs @@ -6,7 +6,7 @@ use hir::{ self, Problem, source_binder, }; use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; -use ra_editor::{self, assists, LocalEdit, Severity}; +use ra_ide_api_light::{self, assists, LocalEdit, Severity}; use ra_syntax::{ TextRange, AstNode, SourceFile, ast::{self, NameOwner}, @@ -194,7 +194,7 @@ impl db::RootDatabase { pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable> { let syntax = self.source_file(file_id); - let mut res = ra_editor::diagnostics(&syntax) + let mut res = ra_ide_api_light::diagnostics(&syntax) .into_iter() .map(|d| Diagnostic { range: d.range, diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs index 48df08416..183e36706 100644 --- a/crates/ra_analysis/src/lib.rs +++ b/crates/ra_analysis/src/lib.rs @@ -44,7 +44,7 @@ pub use crate::{ completion::{CompletionItem, CompletionItemKind, InsertText}, runnables::{Runnable, RunnableKind}, }; -pub use ra_editor::{ +pub use ra_ide_api_light::{ Fold, FoldKind, HighlightedRange, Severity, StructureNode, LineIndex, LineCol, translate_offset_with_edit, }; @@ -336,25 +336,28 @@ impl Analysis { /// Returns position of the mathcing brace (all types of braces are /// supported). pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option { - ra_editor::matching_brace(file, offset) + ra_ide_api_light::matching_brace(file, offset) } /// Returns a syntax tree represented as `String`, for debug purposes. // FIXME: use a better name here. 
pub fn syntax_tree(&self, file_id: FileId) -> String { let file = self.db.source_file(file_id); - ra_editor::syntax_tree(&file) + ra_ide_api_light::syntax_tree(&file) } /// Returns an edit to remove all newlines in the range, cleaning up minor /// stuff like trailing commas. pub fn join_lines(&self, frange: FileRange) -> SourceChange { let file = self.db.source_file(frange.file_id); - SourceChange::from_local_edit(frange.file_id, ra_editor::join_lines(&file, frange.range)) + SourceChange::from_local_edit( + frange.file_id, + ra_ide_api_light::join_lines(&file, frange.range), + ) } /// Returns an edit which should be applied when opening a new line, fixing /// up minor stuff like continuing the comment. pub fn on_enter(&self, position: FilePosition) -> Option { let file = self.db.source_file(position.file_id); - let edit = ra_editor::on_enter(&file, position.offset)?; + let edit = ra_ide_api_light::on_enter(&file, position.offset)?; Some(SourceChange::from_local_edit(position.file_id, edit)) } /// Returns an edit which should be applied after `=` was typed. Primarily, @@ -362,25 +365,25 @@ impl Analysis { // FIXME: use a snippet completion instead of this hack here. pub fn on_eq_typed(&self, position: FilePosition) -> Option { let file = self.db.source_file(position.file_id); - let edit = ra_editor::on_eq_typed(&file, position.offset)?; + let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; Some(SourceChange::from_local_edit(position.file_id, edit)) } /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. pub fn on_dot_typed(&self, position: FilePosition) -> Option { let file = self.db.source_file(position.file_id); - let edit = ra_editor::on_dot_typed(&file, position.offset)?; + let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; Some(SourceChange::from_local_edit(position.file_id, edit)) } /// Returns a tree representation of symbols in the file. 
Useful to draw a /// file outline. pub fn file_structure(&self, file_id: FileId) -> Vec { let file = self.db.source_file(file_id); - ra_editor::file_structure(&file) + ra_ide_api_light::file_structure(&file) } /// Returns the set of folding ranges. pub fn folding_ranges(&self, file_id: FileId) -> Vec { let file = self.db.source_file(file_id); - ra_editor::folding_ranges(&file) + ra_ide_api_light::folding_ranges(&file) } /// Fuzzy searches for a symbol. pub fn symbol_search(&self, query: Query) -> Cancelable> { diff --git a/crates/ra_analysis/src/syntax_highlighting.rs b/crates/ra_analysis/src/syntax_highlighting.rs index d2dc6cfbb..cb19e9515 100644 --- a/crates/ra_analysis/src/syntax_highlighting.rs +++ b/crates/ra_analysis/src/syntax_highlighting.rs @@ -1,22 +1,21 @@ use ra_syntax::{ast, AstNode,}; -use ra_editor::HighlightedRange; use ra_db::SyntaxDatabase; use crate::{ + FileId, Cancelable, HighlightedRange, db::RootDatabase, - FileId, Cancelable, }; pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable> { let source_file = db.source_file(file_id); - let mut res = ra_editor::highlight(source_file.syntax()); + let mut res = ra_ide_api_light::highlight(source_file.syntax()); for macro_call in source_file .syntax() .descendants() .filter_map(ast::MacroCall::cast) { if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { - let mapped_ranges = ra_editor::highlight(&exp.syntax()) + let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax()) .into_iter() .filter_map(|r| { let mapped_range = exp.map_range_back(r.range)?; diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml index 83f1d91e0..eb1722d5e 100644 --- a/crates/ra_cli/Cargo.toml +++ b/crates/ra_cli/Cargo.toml @@ -10,5 +10,5 @@ clap = "2.32.0" failure = "0.1.4" join_to_string = "0.1.1" ra_syntax = { path = "../ra_syntax" } -ra_editor = { path = "../ra_editor" } +ra_ide_api_light = { path = "../ra_ide_api_light" } tools = { path = "../tools" } diff --git 
a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index 0d12f3a88..43fb2fc4c 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs @@ -2,7 +2,7 @@ use std::{fs, io::Read, path::Path, time::Instant}; use clap::{App, Arg, SubCommand}; use join_to_string::join; -use ra_editor::{extend_selection, file_structure, syntax_tree}; +use ra_ide_api_light::{extend_selection, file_structure, syntax_tree}; use ra_syntax::{SourceFile, TextRange, TreePtr, AstNode}; use tools::collect_tests; diff --git a/crates/ra_ide_api_light/Cargo.toml b/crates/ra_ide_api_light/Cargo.toml index a97d2308f..8c192fca6 100644 --- a/crates/ra_ide_api_light/Cargo.toml +++ b/crates/ra_ide_api_light/Cargo.toml @@ -1,6 +1,6 @@ [package] edition = "2018" -name = "ra_editor" +name = "ra_ide_api_light" version = "0.1.0" authors = ["Aleksey Kladov "] publish = false diff --git a/crates/ra_ide_api_light/src/lib.rs b/crates/ra_ide_api_light/src/lib.rs index 5a6af19b7..40638eda8 100644 --- a/crates/ra_ide_api_light/src/lib.rs +++ b/crates/ra_ide_api_light/src/lib.rs @@ -1,3 +1,8 @@ +//! This crate provides thouse IDE features which use only a single file. +//! +//! This usually means functions which take sytnax tree as an input and produce +//! an edit or some auxilarly info. 
+ pub mod assists; mod extend_selection; mod folding_ranges; -- cgit v1.2.3 From 6bca91af532d79abbced5b151cb4188ff8625c04 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 8 Jan 2019 22:30:56 +0300 Subject: rename ra_analysis -> ra_ide_api --- crates/ra_analysis/Cargo.toml | 23 - crates/ra_analysis/src/call_info.rs | 451 ------------------ crates/ra_analysis/src/completion.rs | 77 ---- crates/ra_analysis/src/completion/complete_dot.rs | 121 ----- .../src/completion/complete_fn_param.rs | 102 ----- .../ra_analysis/src/completion/complete_keyword.rs | 339 -------------- crates/ra_analysis/src/completion/complete_path.rs | 128 ------ .../ra_analysis/src/completion/complete_scope.rs | 192 -------- .../ra_analysis/src/completion/complete_snippet.rs | 73 --- .../src/completion/completion_context.rs | 205 --------- .../ra_analysis/src/completion/completion_item.rs | 244 ---------- crates/ra_analysis/src/db.rs | 128 ------ crates/ra_analysis/src/extend_selection.rs | 56 --- crates/ra_analysis/src/goto_defenition.rs | 139 ------ crates/ra_analysis/src/hover.rs | 257 ----------- crates/ra_analysis/src/imp.rs | 309 ------------- crates/ra_analysis/src/lib.rs | 509 --------------------- crates/ra_analysis/src/mock_analysis.rs | 135 ------ crates/ra_analysis/src/runnables.rs | 89 ---- crates/ra_analysis/src/symbol_index.rs | 222 --------- crates/ra_analysis/src/syntax_highlighting.rs | 92 ---- crates/ra_analysis/tests/test/main.rs | 249 ---------- crates/ra_analysis/tests/test/runnables.rs | 109 ----- 23 files changed, 4249 deletions(-) delete mode 100644 crates/ra_analysis/Cargo.toml delete mode 100644 crates/ra_analysis/src/call_info.rs delete mode 100644 crates/ra_analysis/src/completion.rs delete mode 100644 crates/ra_analysis/src/completion/complete_dot.rs delete mode 100644 crates/ra_analysis/src/completion/complete_fn_param.rs delete mode 100644 crates/ra_analysis/src/completion/complete_keyword.rs delete mode 100644 
crates/ra_analysis/src/completion/complete_path.rs delete mode 100644 crates/ra_analysis/src/completion/complete_scope.rs delete mode 100644 crates/ra_analysis/src/completion/complete_snippet.rs delete mode 100644 crates/ra_analysis/src/completion/completion_context.rs delete mode 100644 crates/ra_analysis/src/completion/completion_item.rs delete mode 100644 crates/ra_analysis/src/db.rs delete mode 100644 crates/ra_analysis/src/extend_selection.rs delete mode 100644 crates/ra_analysis/src/goto_defenition.rs delete mode 100644 crates/ra_analysis/src/hover.rs delete mode 100644 crates/ra_analysis/src/imp.rs delete mode 100644 crates/ra_analysis/src/lib.rs delete mode 100644 crates/ra_analysis/src/mock_analysis.rs delete mode 100644 crates/ra_analysis/src/runnables.rs delete mode 100644 crates/ra_analysis/src/symbol_index.rs delete mode 100644 crates/ra_analysis/src/syntax_highlighting.rs delete mode 100644 crates/ra_analysis/tests/test/main.rs delete mode 100644 crates/ra_analysis/tests/test/runnables.rs (limited to 'crates') diff --git a/crates/ra_analysis/Cargo.toml b/crates/ra_analysis/Cargo.toml deleted file mode 100644 index 3c0814279..000000000 --- a/crates/ra_analysis/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -edition = "2018" -name = "ra_analysis" -version = "0.1.0" -authors = ["Aleksey Kladov "] - -[dependencies] -itertools = "0.8.0" -log = "0.4.5" -relative-path = "0.4.0" -rayon = "1.0.2" -fst = "0.3.1" -salsa = "0.9.1" -rustc-hash = "1.0" -parking_lot = "0.7.0" -unicase = "2.2.0" - -ra_syntax = { path = "../ra_syntax" } -ra_ide_api_light = { path = "../ra_ide_api_light" } -ra_text_edit = { path = "../ra_text_edit" } -ra_db = { path = "../ra_db" } -hir = { path = "../ra_hir", package = "ra_hir" } -test_utils = { path = "../test_utils" } diff --git a/crates/ra_analysis/src/call_info.rs b/crates/ra_analysis/src/call_info.rs deleted file mode 100644 index 27b760780..000000000 --- a/crates/ra_analysis/src/call_info.rs +++ /dev/null @@ -1,451 +0,0 @@ 
-use std::cmp::{max, min}; - -use ra_db::{SyntaxDatabase, Cancelable}; -use ra_syntax::{ - AstNode, SyntaxNode, TextUnit, TextRange, - SyntaxKind::FN_DEF, - ast::{self, ArgListOwner, DocCommentsOwner}, - algo::find_node_at_offset, -}; - -use crate::{FilePosition, CallInfo, db::RootDatabase}; - -/// Computes parameter information for the given call expression. -pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Cancelable> { - let file = db.source_file(position.file_id); - let syntax = file.syntax(); - - // Find the calling expression and it's NameRef - let calling_node = ctry!(FnCallNode::with_node(syntax, position.offset)); - let name_ref = ctry!(calling_node.name_ref()); - - // Resolve the function's NameRef (NOTE: this isn't entirely accurate). - let file_symbols = db.index_resolve(name_ref)?; - let symbol = ctry!(file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF)); - let fn_file = db.source_file(symbol.file_id); - let fn_def = symbol.ptr.resolve(&fn_file); - let fn_def = ast::FnDef::cast(&fn_def).unwrap(); - let mut call_info = ctry!(CallInfo::new(fn_def)); - // If we have a calling expression let's find which argument we are on - let num_params = call_info.parameters.len(); - let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some(); - - if num_params == 1 { - if !has_self { - call_info.active_parameter = Some(0); - } - } else if num_params > 1 { - // Count how many parameters into the call we are. - // TODO: This is best effort for now and should be fixed at some point. - // It may be better to see where we are in the arg_list and then check - // where offset is in that list (or beyond). - // Revisit this after we get documentation comments in. 
- if let Some(ref arg_list) = calling_node.arg_list() { - let start = arg_list.syntax().range().start(); - - let range_search = TextRange::from_to(start, position.offset); - let mut commas: usize = arg_list - .syntax() - .text() - .slice(range_search) - .to_string() - .matches(',') - .count(); - - // If we have a method call eat the first param since it's just self. - if has_self { - commas += 1; - } - - call_info.active_parameter = Some(commas); - } - } - - Ok(Some(call_info)) -} - -enum FnCallNode<'a> { - CallExpr(&'a ast::CallExpr), - MethodCallExpr(&'a ast::MethodCallExpr), -} - -impl<'a> FnCallNode<'a> { - pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option> { - if let Some(expr) = find_node_at_offset::(syntax, offset) { - return Some(FnCallNode::CallExpr(expr)); - } - if let Some(expr) = find_node_at_offset::(syntax, offset) { - return Some(FnCallNode::MethodCallExpr(expr)); - } - None - } - - pub fn name_ref(&self) -> Option<&'a ast::NameRef> { - match *self { - FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { - ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, - _ => return None, - }), - - FnCallNode::MethodCallExpr(call_expr) => call_expr - .syntax() - .children() - .filter_map(ast::NameRef::cast) - .nth(0), - } - } - - pub fn arg_list(&self) -> Option<&'a ast::ArgList> { - match *self { - FnCallNode::CallExpr(expr) => expr.arg_list(), - FnCallNode::MethodCallExpr(expr) => expr.arg_list(), - } - } -} - -impl CallInfo { - fn new(node: &ast::FnDef) -> Option { - let mut doc = None; - - // Strip the body out for the label. 
- let mut label: String = if let Some(body) = node.body() { - let body_range = body.syntax().range(); - let label: String = node - .syntax() - .children() - .filter(|child| !child.range().is_subrange(&body_range)) - .map(|node| node.text().to_string()) - .collect(); - label - } else { - node.syntax().text().to_string() - }; - - if let Some((comment_range, docs)) = extract_doc_comments(node) { - let comment_range = comment_range - .checked_sub(node.syntax().range().start()) - .unwrap(); - let start = comment_range.start().to_usize(); - let end = comment_range.end().to_usize(); - - // Remove the comment from the label - label.replace_range(start..end, ""); - - // Massage markdown - let mut processed_lines = Vec::new(); - let mut in_code_block = false; - for line in docs.lines() { - if line.starts_with("```") { - in_code_block = !in_code_block; - } - - let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { - "```rust".into() - } else { - line.to_string() - }; - - processed_lines.push(line); - } - - if !processed_lines.is_empty() { - doc = Some(processed_lines.join("\n")); - } - } - - Some(CallInfo { - parameters: param_list(node), - label: label.trim().to_owned(), - doc, - active_parameter: None, - }) - } -} - -fn extract_doc_comments(node: &ast::FnDef) -> Option<(TextRange, String)> { - if node.doc_comments().count() == 0 { - return None; - } - - let comment_text = node.doc_comment_text(); - - let (begin, end) = node - .doc_comments() - .map(|comment| comment.syntax().range()) - .map(|range| (range.start().to_usize(), range.end().to_usize())) - .fold((std::usize::MAX, std::usize::MIN), |acc, range| { - (min(acc.0, range.0), max(acc.1, range.1)) - }); - - let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); - - Some((range, comment_text)) -} - -fn param_list(node: &ast::FnDef) -> Vec { - let mut res = vec![]; - if let Some(param_list) = node.param_list() { - if let Some(self_param) = 
param_list.self_param() { - res.push(self_param.syntax().text().to_string()) - } - - // Maybe use param.pat here? See if we can just extract the name? - //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); - res.extend( - param_list - .params() - .filter_map(|p| p.pat()) - .map(|pat| pat.syntax().text().to_string()), - ); - } - res -} - -#[cfg(test)] -mod tests { - use super::*; - - use crate::mock_analysis::single_file_with_position; - - fn call_info(text: &str) -> CallInfo { - let (analysis, position) = single_file_with_position(text); - analysis.call_info(position).unwrap().unwrap() - } - - #[test] - fn test_fn_signature_two_args_first() { - let info = call_info( - r#"fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(<|>3, ); }"#, - ); - - assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); - assert_eq!(info.active_parameter, Some(0)); - } - - #[test] - fn test_fn_signature_two_args_second() { - let info = call_info( - r#"fn foo(x: u32, y: u32) -> u32 {x + y} -fn bar() { foo(3, <|>); }"#, - ); - - assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); - assert_eq!(info.active_parameter, Some(1)); - } - - #[test] - fn test_fn_signature_for_impl() { - let info = call_info( - r#"struct F; impl F { pub fn new() { F{}} } -fn bar() {let _ : F = F::new(<|>);}"#, - ); - - assert_eq!(info.parameters, Vec::::new()); - assert_eq!(info.active_parameter, None); - } - - #[test] - fn test_fn_signature_for_method_self() { - let info = call_info( - r#"struct F; -impl F { - pub fn new() -> F{ - F{} - } - - pub fn do_it(&self) {} -} - -fn bar() { - let f : F = F::new(); - f.do_it(<|>); -}"#, - ); - - assert_eq!(info.parameters, vec!["&self".to_string()]); - assert_eq!(info.active_parameter, None); - } - - #[test] - fn test_fn_signature_for_method_with_arg() { - let info = call_info( - r#"struct F; -impl F { - pub fn new() -> F{ - F{} - } - - pub fn do_it(&self, x: i32) {} -} - -fn bar() { - let f : F = F::new(); - 
f.do_it(<|>); -}"#, - ); - - assert_eq!(info.parameters, vec!["&self".to_string(), "x".to_string()]); - assert_eq!(info.active_parameter, Some(1)); - } - - #[test] - fn test_fn_signature_with_docs_simple() { - let info = call_info( - r#" -/// test -// non-doc-comment -fn foo(j: u32) -> u32 { - j -} - -fn bar() { - let _ = foo(<|>); -} -"#, - ); - - assert_eq!(info.parameters, vec!["j".to_string()]); - assert_eq!(info.active_parameter, Some(0)); - assert_eq!(info.label, "fn foo(j: u32) -> u32".to_string()); - assert_eq!(info.doc, Some("test".into())); - } - - #[test] - fn test_fn_signature_with_docs() { - let info = call_info( - r#" -/// Adds one to the number given. -/// -/// # Examples -/// -/// ``` -/// let five = 5; -/// -/// assert_eq!(6, my_crate::add_one(5)); -/// ``` -pub fn add_one(x: i32) -> i32 { - x + 1 -} - -pub fn do() { - add_one(<|> -}"#, - ); - - assert_eq!(info.parameters, vec!["x".to_string()]); - assert_eq!(info.active_parameter, Some(0)); - assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); - assert_eq!( - info.doc, - Some( - r#"Adds one to the number given. - -# Examples - -```rust -let five = 5; - -assert_eq!(6, my_crate::add_one(5)); -```"# - .into() - ) - ); - } - - #[test] - fn test_fn_signature_with_docs_impl() { - let info = call_info( - r#" -struct addr; -impl addr { - /// Adds one to the number given. - /// - /// # Examples - /// - /// ``` - /// let five = 5; - /// - /// assert_eq!(6, my_crate::add_one(5)); - /// ``` - pub fn add_one(x: i32) -> i32 { - x + 1 - } -} - -pub fn do_it() { - addr {}; - addr::add_one(<|>); -}"#, - ); - - assert_eq!(info.parameters, vec!["x".to_string()]); - assert_eq!(info.active_parameter, Some(0)); - assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); - assert_eq!( - info.doc, - Some( - r#"Adds one to the number given. 
- -# Examples - -```rust -let five = 5; - -assert_eq!(6, my_crate::add_one(5)); -```"# - .into() - ) - ); - } - - #[test] - fn test_fn_signature_with_docs_from_actix() { - let info = call_info( - r#" -pub trait WriteHandler -where - Self: Actor, - Self::Context: ActorContext, -{ - /// Method is called when writer emits error. - /// - /// If this method returns `ErrorAction::Continue` writer processing - /// continues otherwise stream processing stops. - fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { - Running::Stop - } - - /// Method is called when writer finishes. - /// - /// By default this method stops actor's `Context`. - fn finished(&mut self, ctx: &mut Self::Context) { - ctx.stop() - } -} - -pub fn foo() { - WriteHandler r; - r.finished(<|>); -} - -"#, - ); - - assert_eq!( - info.parameters, - vec!["&mut self".to_string(), "ctx".to_string()] - ); - assert_eq!(info.active_parameter, Some(1)); - assert_eq!( - info.doc, - Some( - r#"Method is called when writer finishes. - -By default this method stops actor's `Context`."# - .into() - ) - ); - } - -} diff --git a/crates/ra_analysis/src/completion.rs b/crates/ra_analysis/src/completion.rs deleted file mode 100644 index ce777a771..000000000 --- a/crates/ra_analysis/src/completion.rs +++ /dev/null @@ -1,77 +0,0 @@ -mod completion_item; -mod completion_context; - -mod complete_dot; -mod complete_fn_param; -mod complete_keyword; -mod complete_snippet; -mod complete_path; -mod complete_scope; - -use ra_db::SyntaxDatabase; - -use crate::{ - db, - Cancelable, FilePosition, - completion::{ - completion_item::{Completions, CompletionKind}, - completion_context::CompletionContext, - }, -}; - -pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind}; - -/// Main entry point for completion. We run completion as a two-phase process. -/// -/// First, we look at the position and collect a so-called `CompletionContext. 
-/// This is a somewhat messy process, because, during completion, syntax tree is -/// incomplete and can look really weird. -/// -/// Once the context is collected, we run a series of completion routines which -/// look at the context and produce completion items. One subtelty about this -/// phase is that completion engine should not filter by the substring which is -/// already present, it should give all possible variants for the identifier at -/// the caret. In other words, for -/// -/// ```no-run -/// fn f() { -/// let foo = 92; -/// let _ = bar<|> -/// } -/// ``` -/// -/// `foo` *should* be present among the completion variants. Filtering by -/// identifier prefix/fuzzy match should be done higher in the stack, together -/// with ordering of completions (currently this is done by the client). -pub(crate) fn completions( - db: &db::RootDatabase, - position: FilePosition, -) -> Cancelable> { - let original_file = db.source_file(position.file_id); - let ctx = ctry!(CompletionContext::new(db, &original_file, position)?); - - let mut acc = Completions::default(); - - complete_fn_param::complete_fn_param(&mut acc, &ctx); - complete_keyword::complete_expr_keyword(&mut acc, &ctx); - complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); - complete_snippet::complete_expr_snippet(&mut acc, &ctx); - complete_snippet::complete_item_snippet(&mut acc, &ctx); - complete_path::complete_path(&mut acc, &ctx)?; - complete_scope::complete_scope(&mut acc, &ctx)?; - complete_dot::complete_dot(&mut acc, &ctx)?; - - Ok(Some(acc)) -} - -#[cfg(test)] -fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind) { - use crate::mock_analysis::{single_file_with_position, analysis_and_position}; - let (analysis, position) = if code.contains("//-") { - analysis_and_position(code) - } else { - single_file_with_position(code) - }; - let completions = completions(&analysis.db, position).unwrap().unwrap(); - completions.assert_match(expected_completions, kind); -} 
diff --git a/crates/ra_analysis/src/completion/complete_dot.rs b/crates/ra_analysis/src/completion/complete_dot.rs deleted file mode 100644 index 5d4e60dc5..000000000 --- a/crates/ra_analysis/src/completion/complete_dot.rs +++ /dev/null @@ -1,121 +0,0 @@ -use hir::{Ty, Def}; - -use crate::Cancelable; -use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind}; - -/// Complete dot accesses, i.e. fields or methods (currently only fields). -pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { - let (function, receiver) = match (&ctx.function, ctx.dot_receiver) { - (Some(function), Some(receiver)) => (function, receiver), - _ => return Ok(()), - }; - let infer_result = function.infer(ctx.db)?; - let syntax_mapping = function.body_syntax_mapping(ctx.db)?; - let expr = match syntax_mapping.node_expr(receiver) { - Some(expr) => expr, - None => return Ok(()), - }; - let receiver_ty = infer_result[expr].clone(); - if !ctx.is_method_call { - complete_fields(acc, ctx, receiver_ty)?; - } - Ok(()) -} - -fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> { - for receiver in receiver.autoderef(ctx.db) { - match receiver { - Ty::Adt { def_id, .. } => { - match def_id.resolve(ctx.db)? 
{ - Def::Struct(s) => { - let variant_data = s.variant_data(ctx.db)?; - for field in variant_data.fields() { - CompletionItem::new( - CompletionKind::Reference, - field.name().to_string(), - ) - .kind(CompletionItemKind::Field) - .add_to(acc); - } - } - // TODO unions - _ => {} - } - } - Ty::Tuple(fields) => { - for (i, _ty) in fields.iter().enumerate() { - CompletionItem::new(CompletionKind::Reference, i.to_string()) - .kind(CompletionItemKind::Field) - .add_to(acc); - } - } - _ => {} - }; - } - Ok(()) -} - -#[cfg(test)] -mod tests { - use crate::completion::*; - - fn check_ref_completion(code: &str, expected_completions: &str) { - check_completion(code, expected_completions, CompletionKind::Reference); - } - - #[test] - fn test_struct_field_completion() { - check_ref_completion( - r" - struct A { the_field: u32 } - fn foo(a: A) { - a.<|> - } - ", - r#"the_field"#, - ); - } - - #[test] - fn test_struct_field_completion_self() { - check_ref_completion( - r" - struct A { the_field: u32 } - impl A { - fn foo(self) { - self.<|> - } - } - ", - r#"the_field"#, - ); - } - - #[test] - fn test_struct_field_completion_autoderef() { - check_ref_completion( - r" - struct A { the_field: u32 } - impl A { - fn foo(&self) { - self.<|> - } - } - ", - r#"the_field"#, - ); - } - - #[test] - fn test_no_struct_field_completion_for_method_call() { - check_ref_completion( - r" - struct A { the_field: u32 } - fn foo(a: A) { - a.<|>() - } - ", - r#""#, - ); - } -} diff --git a/crates/ra_analysis/src/completion/complete_fn_param.rs b/crates/ra_analysis/src/completion/complete_fn_param.rs deleted file mode 100644 index c1739e47e..000000000 --- a/crates/ra_analysis/src/completion/complete_fn_param.rs +++ /dev/null @@ -1,102 +0,0 @@ -use ra_syntax::{ - algo::visit::{visitor_ctx, VisitorCtx}, - ast, - AstNode, -}; -use rustc_hash::FxHashMap; - -use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem}; - -/// Complete repeated parametes, both name and type. 
For example, if all -/// functions in a file have a `spam: &mut Spam` parameter, a completion with -/// `spam: &mut Spam` insert text/label and `spam` lookup string will be -/// suggested. -pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { - if !ctx.is_param { - return; - } - - let mut params = FxHashMap::default(); - for node in ctx.leaf.ancestors() { - let _ = visitor_ctx(&mut params) - .visit::(process) - .visit::(process) - .accept(node); - } - params - .into_iter() - .filter_map(|(label, (count, param))| { - let lookup = param.pat()?.syntax().text().to_string(); - if count < 2 { - None - } else { - Some((label, lookup)) - } - }) - .for_each(|(label, lookup)| { - CompletionItem::new(CompletionKind::Magic, label) - .lookup_by(lookup) - .add_to(acc) - }); - - fn process<'a, N: ast::FnDefOwner>( - node: &'a N, - params: &mut FxHashMap, - ) { - node.functions() - .filter_map(|it| it.param_list()) - .flat_map(|it| it.params()) - .for_each(|param| { - let text = param.syntax().text().to_string(); - params.entry(text).or_insert((0, param)).0 += 1; - }) - } -} - -#[cfg(test)] -mod tests { - use crate::completion::*; - - fn check_magic_completion(code: &str, expected_completions: &str) { - check_completion(code, expected_completions, CompletionKind::Magic); - } - - #[test] - fn test_param_completion_last_param() { - check_magic_completion( - r" - fn foo(file_id: FileId) {} - fn bar(file_id: FileId) {} - fn baz(file<|>) {} - ", - r#"file_id "file_id: FileId""#, - ); - } - - #[test] - fn test_param_completion_nth_param() { - check_magic_completion( - r" - fn foo(file_id: FileId) {} - fn bar(file_id: FileId) {} - fn baz(file<|>, x: i32) {} - ", - r#"file_id "file_id: FileId""#, - ); - } - - #[test] - fn test_param_completion_trait_param() { - check_magic_completion( - r" - pub(crate) trait SourceRoot { - pub fn contains(&self, file_id: FileId) -> bool; - pub fn module_map(&self) -> &ModuleMap; - pub fn lines(&self, file_id: FileId) -> 
&LineIndex; - pub fn syntax(&self, file<|>) - } - ", - r#"file_id "file_id: FileId""#, - ); - } -} diff --git a/crates/ra_analysis/src/completion/complete_keyword.rs b/crates/ra_analysis/src/completion/complete_keyword.rs deleted file mode 100644 index d350f06ce..000000000 --- a/crates/ra_analysis/src/completion/complete_keyword.rs +++ /dev/null @@ -1,339 +0,0 @@ -use ra_syntax::{ - algo::visit::{visitor, Visitor}, - AstNode, - ast::{self, LoopBodyOwner}, - SyntaxKind::*, SyntaxNode, -}; - -use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; - -pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { - // complete keyword "crate" in use stmt - match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { - (Some(_), None) => { - CompletionItem::new(CompletionKind::Keyword, "crate") - .kind(CompletionItemKind::Keyword) - .lookup_by("crate") - .snippet("crate::") - .add_to(acc); - CompletionItem::new(CompletionKind::Keyword, "self") - .kind(CompletionItemKind::Keyword) - .lookup_by("self") - .add_to(acc); - CompletionItem::new(CompletionKind::Keyword, "super") - .kind(CompletionItemKind::Keyword) - .lookup_by("super") - .add_to(acc); - } - (Some(_), Some(_)) => { - CompletionItem::new(CompletionKind::Keyword, "self") - .kind(CompletionItemKind::Keyword) - .lookup_by("self") - .add_to(acc); - CompletionItem::new(CompletionKind::Keyword, "super") - .kind(CompletionItemKind::Keyword) - .lookup_by("super") - .add_to(acc); - } - _ => {} - } -} - -fn keyword(kw: &str, snippet: &str) -> CompletionItem { - CompletionItem::new(CompletionKind::Keyword, kw) - .kind(CompletionItemKind::Keyword) - .snippet(snippet) - .build() -} - -pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { - if !ctx.is_trivial_path { - return; - } - - let fn_def = match ctx.function_syntax { - Some(it) => it, - None => return, - }; - acc.add(keyword("if", "if $0 {}")); - 
acc.add(keyword("match", "match $0 {}")); - acc.add(keyword("while", "while $0 {}")); - acc.add(keyword("loop", "loop {$0}")); - - if ctx.after_if { - acc.add(keyword("else", "else {$0}")); - acc.add(keyword("else if", "else if $0 {}")); - } - if is_in_loop_body(ctx.leaf) { - if ctx.can_be_stmt { - acc.add(keyword("continue", "continue;")); - acc.add(keyword("break", "break;")); - } else { - acc.add(keyword("continue", "continue")); - acc.add(keyword("break", "break")); - } - } - acc.add_all(complete_return(fn_def, ctx.can_be_stmt)); -} - -fn is_in_loop_body(leaf: &SyntaxNode) -> bool { - for node in leaf.ancestors() { - if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { - break; - } - let loop_body = visitor() - .visit::(LoopBodyOwner::loop_body) - .visit::(LoopBodyOwner::loop_body) - .visit::(LoopBodyOwner::loop_body) - .accept(node); - if let Some(Some(body)) = loop_body { - if leaf.range().is_subrange(&body.syntax().range()) { - return true; - } - } - } - false -} - -fn complete_return(fn_def: &ast::FnDef, can_be_stmt: bool) -> Option { - let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { - (true, true) => "return $0;", - (true, false) => "return;", - (false, true) => "return $0", - (false, false) => "return", - }; - Some(keyword("return", snip)) -} - -#[cfg(test)] -mod tests { - use crate::completion::{CompletionKind, check_completion}; - fn check_keyword_completion(code: &str, expected_completions: &str) { - check_completion(code, expected_completions, CompletionKind::Keyword); - } - - #[test] - fn completes_keywords_in_use_stmt() { - check_keyword_completion( - r" - use <|> - ", - r#" - crate "crate" "crate::" - self "self" - super "super" - "#, - ); - - check_keyword_completion( - r" - use a::<|> - ", - r#" - self "self" - super "super" - "#, - ); - - check_keyword_completion( - r" - use a::{b, <|>} - ", - r#" - self "self" - super "super" - "#, - ); - } - - #[test] - fn completes_various_keywords_in_function() { - check_keyword_completion( 
- r" - fn quux() { - <|> - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return;" - "#, - ); - } - - #[test] - fn completes_else_after_if() { - check_keyword_completion( - r" - fn quux() { - if true { - () - } <|> - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - else "else {$0}" - else if "else if $0 {}" - return "return;" - "#, - ); - } - - #[test] - fn test_completion_return_value() { - check_keyword_completion( - r" - fn quux() -> i32 { - <|> - 92 - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return $0;" - "#, - ); - check_keyword_completion( - r" - fn quux() { - <|> - 92 - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return;" - "#, - ); - } - - #[test] - fn dont_add_semi_after_return_if_not_a_statement() { - check_keyword_completion( - r" - fn quux() -> i32 { - match () { - () => <|> - } - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return $0" - "#, - ); - } - - #[test] - fn last_return_in_block_has_semi() { - check_keyword_completion( - r" - fn quux() -> i32 { - if condition { - <|> - } - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return $0;" - "#, - ); - check_keyword_completion( - r" - fn quux() -> i32 { - if condition { - <|> - } - let x = 92; - x - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return $0;" - "#, - ); - } - - #[test] - fn completes_break_and_continue_in_loops() { - check_keyword_completion( - r" - fn quux() -> i32 { - loop { <|> } - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - continue "continue;" - break "break;" - return "return $0;" - "#, - ); - // No completion: lambda isolates control flow - 
check_keyword_completion( - r" - fn quux() -> i32 { - loop { || { <|> } } - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - return "return $0;" - "#, - ); - } - - #[test] - fn no_semi_after_break_continue_in_expr() { - check_keyword_completion( - r" - fn f() { - loop { - match () { - () => br<|> - } - } - } - ", - r#" - if "if $0 {}" - match "match $0 {}" - while "while $0 {}" - loop "loop {$0}" - continue "continue" - break "break" - return "return" - "#, - ) - } -} diff --git a/crates/ra_analysis/src/completion/complete_path.rs b/crates/ra_analysis/src/completion/complete_path.rs deleted file mode 100644 index 4723a65a6..000000000 --- a/crates/ra_analysis/src/completion/complete_path.rs +++ /dev/null @@ -1,128 +0,0 @@ -use crate::{ - Cancelable, - completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, -}; - -pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { - let (path, module) = match (&ctx.path_prefix, &ctx.module) { - (Some(path), Some(module)) => (path.clone(), module), - _ => return Ok(()), - }; - let def_id = match module.resolve_path(ctx.db, &path)?.take_types() { - Some(it) => it, - None => return Ok(()), - }; - match def_id.resolve(ctx.db)? { - hir::Def::Module(module) => { - let module_scope = module.scope(ctx.db)?; - module_scope.entries().for_each(|(name, res)| { - CompletionItem::new(CompletionKind::Reference, name.to_string()) - .from_resolution(ctx, res) - .add_to(acc) - }); - } - hir::Def::Enum(e) => e - .variants(ctx.db)? 
- .into_iter() - .for_each(|(name, _variant)| { - CompletionItem::new(CompletionKind::Reference, name.to_string()) - .kind(CompletionItemKind::EnumVariant) - .add_to(acc) - }), - _ => return Ok(()), - }; - Ok(()) -} - -#[cfg(test)] -mod tests { - use crate::completion::{CompletionKind, check_completion}; - - fn check_reference_completion(code: &str, expected_completions: &str) { - check_completion(code, expected_completions, CompletionKind::Reference); - } - - #[test] - fn completes_use_item_starting_with_self() { - check_reference_completion( - r" - use self::m::<|>; - - mod m { - struct Bar; - } - ", - "Bar", - ); - } - - #[test] - fn completes_use_item_starting_with_crate() { - check_reference_completion( - " - //- /lib.rs - mod foo; - struct Spam; - //- /foo.rs - use crate::Sp<|> - ", - "Spam;foo", - ); - } - - #[test] - fn completes_nested_use_tree() { - check_reference_completion( - " - //- /lib.rs - mod foo; - struct Spam; - //- /foo.rs - use crate::{Sp<|>}; - ", - "Spam;foo", - ); - } - - #[test] - fn completes_deeply_nested_use_tree() { - check_reference_completion( - " - //- /lib.rs - mod foo; - pub mod bar { - pub mod baz { - pub struct Spam; - } - } - //- /foo.rs - use crate::{bar::{baz::Sp<|>}}; - ", - "Spam", - ); - } - - #[test] - fn completes_enum_variant() { - check_reference_completion( - " - //- /lib.rs - enum E { Foo, Bar(i32) } - fn foo() { let _ = E::<|> } - ", - "Foo;Bar", - ); - } - - #[test] - fn dont_render_function_parens_in_use_item() { - check_reference_completion( - " - //- /lib.rs - mod m { pub fn foo() {} } - use crate::m::f<|>; - ", - "foo", - ) - } -} diff --git a/crates/ra_analysis/src/completion/complete_scope.rs b/crates/ra_analysis/src/completion/complete_scope.rs deleted file mode 100644 index ee9052d3d..000000000 --- a/crates/ra_analysis/src/completion/complete_scope.rs +++ /dev/null @@ -1,192 +0,0 @@ -use rustc_hash::FxHashSet; -use ra_syntax::TextUnit; - -use crate::{ - Cancelable, - completion::{CompletionItem, 
CompletionItemKind, Completions, CompletionKind, CompletionContext}, -}; - -pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { - if !ctx.is_trivial_path { - return Ok(()); - } - let module = match &ctx.module { - Some(it) => it, - None => return Ok(()), - }; - if let Some(function) = &ctx.function { - let scopes = function.scopes(ctx.db)?; - complete_fn(acc, &scopes, ctx.offset); - } - - let module_scope = module.scope(ctx.db)?; - let (file_id, _) = module.defenition_source(ctx.db)?; - module_scope - .entries() - .filter(|(_name, res)| { - // Don't expose this item - // FIXME: this penetrates through all kinds of abstractions, - // we need to figura out the way to do it less ugly. - match res.import { - None => true, - Some(import) => { - let range = import.range(ctx.db, file_id); - !range.is_subrange(&ctx.leaf.range()) - } - } - }) - .for_each(|(name, res)| { - CompletionItem::new(CompletionKind::Reference, name.to_string()) - .from_resolution(ctx, res) - .add_to(acc) - }); - Ok(()) -} - -fn complete_fn(acc: &mut Completions, scopes: &hir::ScopesWithSyntaxMapping, offset: TextUnit) { - let mut shadowed = FxHashSet::default(); - scopes - .scope_chain_for_offset(offset) - .flat_map(|scope| scopes.scopes.entries(scope).iter()) - .filter(|entry| shadowed.insert(entry.name())) - .for_each(|entry| { - CompletionItem::new(CompletionKind::Reference, entry.name().to_string()) - .kind(CompletionItemKind::Binding) - .add_to(acc) - }); -} - -#[cfg(test)] -mod tests { - use crate::completion::{CompletionKind, check_completion}; - - fn check_reference_completion(code: &str, expected_completions: &str) { - check_completion(code, expected_completions, CompletionKind::Reference); - } - - #[test] - fn completes_bindings_from_let() { - check_reference_completion( - r" - fn quux(x: i32) { - let y = 92; - 1 + <|>; - let z = (); - } - ", - r#"y;x;quux "quux($0)""#, - ); - } - - #[test] - fn completes_bindings_from_if_let() { - 
check_reference_completion( - r" - fn quux() { - if let Some(x) = foo() { - let y = 92; - }; - if let Some(a) = bar() { - let b = 62; - 1 + <|> - } - } - ", - r#"b;a;quux "quux()$0""#, - ); - } - - #[test] - fn completes_bindings_from_for() { - check_reference_completion( - r" - fn quux() { - for x in &[1, 2, 3] { - <|> - } - } - ", - r#"x;quux "quux()$0""#, - ); - } - - #[test] - fn completes_module_items() { - check_reference_completion( - r" - struct Foo; - enum Baz {} - fn quux() { - <|> - } - ", - r#"quux "quux()$0";Foo;Baz"#, - ); - } - - #[test] - fn completes_module_items_in_nested_modules() { - check_reference_completion( - r" - struct Foo; - mod m { - struct Bar; - fn quux() { <|> } - } - ", - r#"quux "quux()$0";Bar"#, - ); - } - - #[test] - fn completes_return_type() { - check_reference_completion( - r" - struct Foo; - fn x() -> <|> - ", - r#"Foo;x "x()$0""#, - ) - } - - #[test] - fn dont_show_both_completions_for_shadowing() { - check_reference_completion( - r" - fn foo() -> { - let bar = 92; - { - let bar = 62; - <|> - } - } - ", - r#"bar;foo "foo()$0""#, - ) - } - - #[test] - fn completes_self_in_methods() { - check_reference_completion(r"impl S { fn foo(&self) { <|> } }", "self") - } - - #[test] - fn inserts_parens_for_function_calls() { - check_reference_completion( - r" - fn no_args() {} - fn main() { no_<|> } - ", - r#"no_args "no_args()$0" - main "main()$0""#, - ); - check_reference_completion( - r" - fn with_args(x: i32, y: String) {} - fn main() { with_<|> } - ", - r#"main "main()$0" - with_args "with_args($0)""#, - ); - } -} diff --git a/crates/ra_analysis/src/completion/complete_snippet.rs b/crates/ra_analysis/src/completion/complete_snippet.rs deleted file mode 100644 index a495751dd..000000000 --- a/crates/ra_analysis/src/completion/complete_snippet.rs +++ /dev/null @@ -1,73 +0,0 @@ -use crate::completion::{CompletionItem, Completions, CompletionKind, CompletionItemKind, CompletionContext, completion_item::Builder}; - -fn snippet(label: 
&str, snippet: &str) -> Builder { - CompletionItem::new(CompletionKind::Snippet, label) - .snippet(snippet) - .kind(CompletionItemKind::Snippet) -} - -pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { - if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { - return; - } - snippet("pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); - snippet("ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); -} - -pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { - if !ctx.is_new_item { - return; - } - snippet( - "Test function", - "\ -#[test] -fn ${1:feature}() { - $0 -}", - ) - .lookup_by("tfn") - .add_to(acc); - - snippet("pub(crate)", "pub(crate) $0").add_to(acc); -} - -#[cfg(test)] -mod tests { - use crate::completion::{CompletionKind, check_completion}; - fn check_snippet_completion(code: &str, expected_completions: &str) { - check_completion(code, expected_completions, CompletionKind::Snippet); - } - - #[test] - fn completes_snippets_in_expressions() { - check_snippet_completion( - r"fn foo(x: i32) { <|> }", - r##" - pd "eprintln!(\"$0 = {:?}\", $0);" - ppd "eprintln!(\"$0 = {:#?}\", $0);" - "##, - ); - } - - #[test] - fn completes_snippets_in_items() { - // check_snippet_completion(r" - // <|> - // ", - // r##"[CompletionItem { label: "Test function", lookup: None, snippet: Some("#[test]\nfn test_${1:feature}() {\n$0\n}"##, - // ); - check_snippet_completion( - r" - #[cfg(test)] - mod tests { - <|> - } - ", - r##" - tfn "Test function" "#[test]\nfn ${1:feature}() {\n $0\n}" - pub(crate) "pub(crate) $0" - "##, - ); - } -} diff --git a/crates/ra_analysis/src/completion/completion_context.rs b/crates/ra_analysis/src/completion/completion_context.rs deleted file mode 100644 index 01786bb69..000000000 --- a/crates/ra_analysis/src/completion/completion_context.rs +++ /dev/null @@ -1,205 +0,0 @@ -use ra_text_edit::AtomTextEdit; -use ra_syntax::{ - AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, 
- ast, - algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset}, - SyntaxKind::*, -}; -use hir::source_binder; - -use crate::{db, FilePosition, Cancelable}; - -/// `CompletionContext` is created early during completion to figure out, where -/// exactly is the cursor, syntax-wise. -#[derive(Debug)] -pub(super) struct CompletionContext<'a> { - pub(super) db: &'a db::RootDatabase, - pub(super) offset: TextUnit, - pub(super) leaf: &'a SyntaxNode, - pub(super) module: Option, - pub(super) function: Option, - pub(super) function_syntax: Option<&'a ast::FnDef>, - pub(super) use_item_syntax: Option<&'a ast::UseItem>, - pub(super) is_param: bool, - /// A single-indent path, like `foo`. - pub(super) is_trivial_path: bool, - /// If not a trivial, path, the prefix (qualifier). - pub(super) path_prefix: Option, - pub(super) after_if: bool, - /// `true` if we are a statement or a last expr in the block. - pub(super) can_be_stmt: bool, - /// Something is typed at the "top" level, in module or impl/trait. - pub(super) is_new_item: bool, - /// The receiver if this is a field or method access, i.e. writing something.<|> - pub(super) dot_receiver: Option<&'a ast::Expr>, - /// If this is a method call in particular, i.e. the () are already there. 
- pub(super) is_method_call: bool, -} - -impl<'a> CompletionContext<'a> { - pub(super) fn new( - db: &'a db::RootDatabase, - original_file: &'a SourceFile, - position: FilePosition, - ) -> Cancelable>> { - let module = source_binder::module_from_position(db, position)?; - let leaf = - ctry!(find_leaf_at_offset(original_file.syntax(), position.offset).left_biased()); - let mut ctx = CompletionContext { - db, - leaf, - offset: position.offset, - module, - function: None, - function_syntax: None, - use_item_syntax: None, - is_param: false, - is_trivial_path: false, - path_prefix: None, - after_if: false, - can_be_stmt: false, - is_new_item: false, - dot_receiver: None, - is_method_call: false, - }; - ctx.fill(original_file, position.offset); - Ok(Some(ctx)) - } - - fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) { - // Insert a fake ident to get a valid parse tree. We will use this file - // to determine context, though the original_file will be used for - // actual completion. - let file = { - let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); - original_file.reparse(&edit) - }; - - // First, let's try to complete a reference to some declaration. - if let Some(name_ref) = find_node_at_offset::(file.syntax(), offset) { - // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. - // See RFC#1685. - if is_node::(name_ref.syntax()) { - self.is_param = true; - return; - } - self.classify_name_ref(original_file, name_ref); - } - - // Otherwise, see if this is a declaration. We can use heuristics to - // suggest declaration names, see `CompletionKind::Magic`. 
- if let Some(name) = find_node_at_offset::(file.syntax(), offset) { - if is_node::(name.syntax()) { - self.is_param = true; - return; - } - } - } - fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { - let name_range = name_ref.syntax().range(); - let top_node = name_ref - .syntax() - .ancestors() - .take_while(|it| it.range() == name_range) - .last() - .unwrap(); - - match top_node.parent().map(|it| it.kind()) { - Some(SOURCE_FILE) | Some(ITEM_LIST) => { - self.is_new_item = true; - return; - } - _ => (), - } - - self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast); - - self.function_syntax = self - .leaf - .ancestors() - .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) - .find_map(ast::FnDef::cast); - match (&self.module, self.function_syntax) { - (Some(module), Some(fn_def)) => { - let function = source_binder::function_from_module(self.db, module, fn_def); - self.function = Some(function); - } - _ => (), - } - - let parent = match name_ref.syntax().parent() { - Some(it) => it, - None => return, - }; - if let Some(segment) = ast::PathSegment::cast(parent) { - let path = segment.parent_path(); - if let Some(mut path) = hir::Path::from_ast(path) { - if !path.is_ident() { - path.segments.pop().unwrap(); - self.path_prefix = Some(path); - return; - } - } - if path.qualifier().is_none() { - self.is_trivial_path = true; - - // Find either enclosing expr statement (thing with `;`) or a - // block. If block, check that we are the last expr. 
- self.can_be_stmt = name_ref - .syntax() - .ancestors() - .find_map(|node| { - if let Some(stmt) = ast::ExprStmt::cast(node) { - return Some(stmt.syntax().range() == name_ref.syntax().range()); - } - if let Some(block) = ast::Block::cast(node) { - return Some( - block.expr().map(|e| e.syntax().range()) - == Some(name_ref.syntax().range()), - ); - } - None - }) - .unwrap_or(false); - - if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) { - if let Some(if_expr) = - find_node_at_offset::(original_file.syntax(), off) - { - if if_expr.syntax().range().end() < name_ref.syntax().range().start() { - self.after_if = true; - } - } - } - } - } - if let Some(field_expr) = ast::FieldExpr::cast(parent) { - // The receiver comes before the point of insertion of the fake - // ident, so it should have the same range in the non-modified file - self.dot_receiver = field_expr - .expr() - .map(|e| e.syntax().range()) - .and_then(|r| find_node_with_range(original_file.syntax(), r)); - } - if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { - // As above - self.dot_receiver = method_call_expr - .expr() - .map(|e| e.syntax().range()) - .and_then(|r| find_node_with_range(original_file.syntax(), r)); - self.is_method_call = true; - } - } -} - -fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { - let node = find_covering_node(syntax, range); - node.ancestors().find_map(N::cast) -} - -fn is_node(node: &SyntaxNode) -> bool { - match node.ancestors().filter_map(N::cast).next() { - None => false, - Some(n) => n.syntax().range() == node.range(), - } -} diff --git a/crates/ra_analysis/src/completion/completion_item.rs b/crates/ra_analysis/src/completion/completion_item.rs deleted file mode 100644 index a25b87bee..000000000 --- a/crates/ra_analysis/src/completion/completion_item.rs +++ /dev/null @@ -1,244 +0,0 @@ -use hir::PerNs; - -use crate::completion::CompletionContext; - -/// `CompletionItem` describes a single completion 
variant in the editor pop-up. -/// It is basically a POD with various properties. To construct a -/// `CompletionItem`, use `new` method and the `Builder` struct. -#[derive(Debug)] -pub struct CompletionItem { - /// Used only internally in tests, to check only specific kind of - /// completion. - completion_kind: CompletionKind, - label: String, - lookup: Option, - snippet: Option, - kind: Option, -} - -pub enum InsertText { - PlainText { text: String }, - Snippet { text: String }, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum CompletionItemKind { - Snippet, - Keyword, - Module, - Function, - Struct, - Enum, - EnumVariant, - Binding, - Field, -} - -#[derive(Debug, PartialEq, Eq)] -pub(crate) enum CompletionKind { - /// Parser-based keyword completion. - Keyword, - /// Your usual "complete all valid identifiers". - Reference, - /// "Secret sauce" completions. - Magic, - Snippet, -} - -impl CompletionItem { - pub(crate) fn new(completion_kind: CompletionKind, label: impl Into) -> Builder { - let label = label.into(); - Builder { - completion_kind, - label, - lookup: None, - snippet: None, - kind: None, - } - } - /// What user sees in pop-up in the UI. - pub fn label(&self) -> &str { - &self.label - } - /// What string is used for filtering. - pub fn lookup(&self) -> &str { - self.lookup - .as_ref() - .map(|it| it.as_str()) - .unwrap_or(self.label()) - } - /// What is inserted. - pub fn insert_text(&self) -> InsertText { - match &self.snippet { - None => InsertText::PlainText { - text: self.label.clone(), - }, - Some(it) => InsertText::Snippet { text: it.clone() }, - } - } - - pub fn kind(&self) -> Option { - self.kind - } -} - -/// A helper to make `CompletionItem`s. 
-#[must_use] -pub(crate) struct Builder { - completion_kind: CompletionKind, - label: String, - lookup: Option, - snippet: Option, - kind: Option, -} - -impl Builder { - pub(crate) fn add_to(self, acc: &mut Completions) { - acc.add(self.build()) - } - - pub(crate) fn build(self) -> CompletionItem { - CompletionItem { - label: self.label, - lookup: self.lookup, - snippet: self.snippet, - kind: self.kind, - completion_kind: self.completion_kind, - } - } - pub(crate) fn lookup_by(mut self, lookup: impl Into) -> Builder { - self.lookup = Some(lookup.into()); - self - } - pub(crate) fn snippet(mut self, snippet: impl Into) -> Builder { - self.snippet = Some(snippet.into()); - self - } - pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { - self.kind = Some(kind); - self - } - pub(super) fn from_resolution( - mut self, - ctx: &CompletionContext, - resolution: &hir::Resolution, - ) -> Builder { - let resolved = resolution.def_id.and_then(|d| d.resolve(ctx.db).ok()); - let kind = match resolved { - PerNs { - types: Some(hir::Def::Module(..)), - .. - } => CompletionItemKind::Module, - PerNs { - types: Some(hir::Def::Struct(..)), - .. - } => CompletionItemKind::Struct, - PerNs { - types: Some(hir::Def::Enum(..)), - .. - } => CompletionItemKind::Enum, - PerNs { - values: Some(hir::Def::Function(function)), - .. - } => return self.from_function(ctx, function), - _ => return self, - }; - self.kind = Some(kind); - self - } - - fn from_function(mut self, ctx: &CompletionContext, function: hir::Function) -> Builder { - // If not an import, add parenthesis automatically. 
- if ctx.use_item_syntax.is_none() { - if function.signature(ctx.db).args().is_empty() { - self.snippet = Some(format!("{}()$0", self.label)); - } else { - self.snippet = Some(format!("{}($0)", self.label)); - } - } - self.kind = Some(CompletionItemKind::Function); - self - } -} - -impl Into for Builder { - fn into(self) -> CompletionItem { - self.build() - } -} - -/// Represents an in-progress set of completions being built. -#[derive(Debug, Default)] -pub(crate) struct Completions { - buf: Vec, -} - -impl Completions { - pub(crate) fn add(&mut self, item: impl Into) { - self.buf.push(item.into()) - } - pub(crate) fn add_all(&mut self, items: I) - where - I: IntoIterator, - I::Item: Into, - { - items.into_iter().for_each(|item| self.add(item.into())) - } - - #[cfg(test)] - pub(crate) fn assert_match(&self, expected: &str, kind: CompletionKind) { - let expected = normalize(expected); - let actual = self.debug_render(kind); - test_utils::assert_eq_text!(expected.as_str(), actual.as_str(),); - - /// Normalize the textual representation of `Completions`: - /// replace `;` with newlines, normalize whitespace - fn normalize(expected: &str) -> String { - use ra_syntax::{tokenize, TextUnit, TextRange, SyntaxKind::SEMI}; - let mut res = String::new(); - for line in expected.trim().lines() { - let line = line.trim(); - let mut start_offset: TextUnit = 0.into(); - // Yep, we use rust tokenize in completion tests :-) - for token in tokenize(line) { - let range = TextRange::offset_len(start_offset, token.len); - start_offset += token.len; - if token.kind == SEMI { - res.push('\n'); - } else { - res.push_str(&line[range]); - } - } - - res.push('\n'); - } - res - } - } - - #[cfg(test)] - fn debug_render(&self, kind: CompletionKind) -> String { - let mut res = String::new(); - for c in self.buf.iter() { - if c.completion_kind == kind { - if let Some(lookup) = &c.lookup { - res.push_str(lookup); - res.push_str(&format!(" {:?}", c.label)); - } else { - res.push_str(&c.label); - } - 
if let Some(snippet) = &c.snippet { - res.push_str(&format!(" {:?}", snippet)); - } - res.push('\n'); - } - } - res - } -} - -impl Into> for Completions { - fn into(self) -> Vec { - self.buf - } -} diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs deleted file mode 100644 index 9d46609ec..000000000 --- a/crates/ra_analysis/src/db.rs +++ /dev/null @@ -1,128 +0,0 @@ -use std::{fmt, sync::Arc}; - -use salsa::{self, Database}; -use ra_db::{LocationIntener, BaseDatabase, FileId}; - -use crate::{symbol_index, LineIndex}; - -#[derive(Debug)] -pub(crate) struct RootDatabase { - runtime: salsa::Runtime, - id_maps: Arc, -} - -#[derive(Default)] -struct IdMaps { - defs: LocationIntener, - macros: LocationIntener, -} - -impl fmt::Debug for IdMaps { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("IdMaps") - .field("n_defs", &self.defs.len()) - .finish() - } -} - -impl salsa::Database for RootDatabase { - fn salsa_runtime(&self) -> &salsa::Runtime { - &self.runtime - } -} - -impl Default for RootDatabase { - fn default() -> RootDatabase { - let mut db = RootDatabase { - runtime: salsa::Runtime::default(), - id_maps: Default::default(), - }; - db.query_mut(ra_db::CrateGraphQuery) - .set((), Default::default()); - db.query_mut(ra_db::LocalRootsQuery) - .set((), Default::default()); - db.query_mut(ra_db::LibraryRootsQuery) - .set((), Default::default()); - db - } -} - -impl salsa::ParallelDatabase for RootDatabase { - fn snapshot(&self) -> salsa::Snapshot { - salsa::Snapshot::new(RootDatabase { - runtime: self.runtime.snapshot(self), - id_maps: self.id_maps.clone(), - }) - } -} - -impl BaseDatabase for RootDatabase {} - -impl AsRef> for RootDatabase { - fn as_ref(&self) -> &LocationIntener { - &self.id_maps.defs - } -} - -impl AsRef> for RootDatabase { - fn as_ref(&self) -> &LocationIntener { - &self.id_maps.macros - } -} - -salsa::query_group! 
{ - pub(crate) trait LineIndexDatabase: ra_db::FilesDatabase + BaseDatabase { - fn line_index(file_id: FileId) -> Arc { - type LineIndexQuery; - } - } -} - -fn line_index(db: &impl ra_db::FilesDatabase, file_id: FileId) -> Arc { - let text = db.file_text(file_id); - Arc::new(LineIndex::new(&*text)) -} - -salsa::database_storage! { - pub(crate) struct RootDatabaseStorage for RootDatabase { - impl ra_db::FilesDatabase { - fn file_text() for ra_db::FileTextQuery; - fn file_relative_path() for ra_db::FileRelativePathQuery; - fn file_source_root() for ra_db::FileSourceRootQuery; - fn source_root() for ra_db::SourceRootQuery; - fn local_roots() for ra_db::LocalRootsQuery; - fn library_roots() for ra_db::LibraryRootsQuery; - fn crate_graph() for ra_db::CrateGraphQuery; - } - impl ra_db::SyntaxDatabase { - fn source_file() for ra_db::SourceFileQuery; - } - impl LineIndexDatabase { - fn line_index() for LineIndexQuery; - } - impl symbol_index::SymbolsDatabase { - fn file_symbols() for symbol_index::FileSymbolsQuery; - fn library_symbols() for symbol_index::LibrarySymbolsQuery; - } - impl hir::db::HirDatabase { - fn hir_source_file() for hir::db::HirSourceFileQuery; - fn expand_macro_invocation() for hir::db::ExpandMacroCallQuery; - fn module_tree() for hir::db::ModuleTreeQuery; - fn fn_scopes() for hir::db::FnScopesQuery; - fn file_items() for hir::db::SourceFileItemsQuery; - fn file_item() for hir::db::FileItemQuery; - fn input_module_items() for hir::db::InputModuleItemsQuery; - fn item_map() for hir::db::ItemMapQuery; - fn submodules() for hir::db::SubmodulesQuery; - fn infer() for hir::db::InferQuery; - fn type_for_def() for hir::db::TypeForDefQuery; - fn type_for_field() for hir::db::TypeForFieldQuery; - fn struct_data() for hir::db::StructDataQuery; - fn enum_data() for hir::db::EnumDataQuery; - fn impls_in_module() for hir::db::ImplsInModuleQuery; - fn body_hir() for hir::db::BodyHirQuery; - fn body_syntax_mapping() for hir::db::BodySyntaxMappingQuery; - fn 
fn_signature() for hir::db::FnSignatureQuery; - } - } -} diff --git a/crates/ra_analysis/src/extend_selection.rs b/crates/ra_analysis/src/extend_selection.rs deleted file mode 100644 index c3c809c9f..000000000 --- a/crates/ra_analysis/src/extend_selection.rs +++ /dev/null @@ -1,56 +0,0 @@ -use ra_db::SyntaxDatabase; -use ra_syntax::{ - SyntaxNode, AstNode, SourceFile, - ast, algo::find_covering_node, -}; - -use crate::{ - TextRange, FileRange, - db::RootDatabase, -}; - -pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { - let source_file = db.source_file(frange.file_id); - if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { - return range; - } - ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) -} - -fn extend_selection_in_macro( - _db: &RootDatabase, - source_file: &SourceFile, - frange: FileRange, -) -> Option { - let macro_call = find_macro_call(source_file.syntax(), frange.range)?; - let (off, exp) = hir::MacroDef::ast_expand(macro_call)?; - let dst_range = exp.map_range_forward(frange.range - off)?; - let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?; - let src_range = exp.map_range_back(dst_range)? 
+ off; - Some(src_range) -} - -fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> { - find_covering_node(node, range) - .ancestors() - .find_map(ast::MacroCall::cast) -} - -#[cfg(test)] -mod tests { - use crate::mock_analysis::single_file_with_range; - use test_utils::assert_eq_dbg; - - #[test] - fn extend_selection_inside_macros() { - let (analysis, frange) = single_file_with_range( - " - fn main() { - ctry!(foo(|x| <|>x<|>)); - } - ", - ); - let r = analysis.extend_selection(frange); - assert_eq_dbg("[51; 56)", &r); - } -} diff --git a/crates/ra_analysis/src/goto_defenition.rs b/crates/ra_analysis/src/goto_defenition.rs deleted file mode 100644 index fcd8d315e..000000000 --- a/crates/ra_analysis/src/goto_defenition.rs +++ /dev/null @@ -1,139 +0,0 @@ -use ra_db::{FileId, Cancelable, SyntaxDatabase}; -use ra_syntax::{ - TextRange, AstNode, ast, SyntaxKind::{NAME, MODULE}, - algo::find_node_at_offset, -}; - -use crate::{FilePosition, NavigationTarget, db::RootDatabase}; - -pub(crate) fn goto_defenition( - db: &RootDatabase, - position: FilePosition, -) -> Cancelable>> { - let file = db.source_file(position.file_id); - let syntax = file.syntax(); - if let Some(name_ref) = find_node_at_offset::(syntax, position.offset) { - return Ok(Some(reference_defenition(db, position.file_id, name_ref)?)); - } - if let Some(name) = find_node_at_offset::(syntax, position.offset) { - return name_defenition(db, position.file_id, name); - } - Ok(None) -} - -pub(crate) fn reference_defenition( - db: &RootDatabase, - file_id: FileId, - name_ref: &ast::NameRef, -) -> Cancelable> { - if let Some(fn_descr) = - hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())? 
- { - let scope = fn_descr.scopes(db)?; - // First try to resolve the symbol locally - if let Some(entry) = scope.resolve_local_name(name_ref) { - let nav = NavigationTarget { - file_id, - name: entry.name().to_string().into(), - range: entry.ptr().range(), - kind: NAME, - ptr: None, - }; - return Ok(vec![nav]); - }; - } - // If that fails try the index based approach. - let navs = db - .index_resolve(name_ref)? - .into_iter() - .map(NavigationTarget::from_symbol) - .collect(); - Ok(navs) -} - -fn name_defenition( - db: &RootDatabase, - file_id: FileId, - name: &ast::Name, -) -> Cancelable>> { - if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { - if module.has_semi() { - if let Some(child_module) = - hir::source_binder::module_from_declaration(db, file_id, module)? - { - let (file_id, _) = child_module.defenition_source(db)?; - let name = match child_module.name(db)? { - Some(name) => name.to_string().into(), - None => "".into(), - }; - let nav = NavigationTarget { - file_id, - name, - range: TextRange::offset_len(0.into(), 0.into()), - kind: MODULE, - ptr: None, - }; - return Ok(Some(vec![nav])); - } - } - } - Ok(None) -} - -#[cfg(test)] -mod tests { - use test_utils::assert_eq_dbg; - use crate::mock_analysis::analysis_and_position; - - #[test] - fn goto_defenition_works_in_items() { - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - struct Foo; - enum E { X(Foo<|>) } - ", - ); - - let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); - assert_eq_dbg( - r#"[NavigationTarget { file_id: FileId(1), name: "Foo", - kind: STRUCT_DEF, range: [0; 11), - ptr: Some(LocalSyntaxPtr { range: [0; 11), kind: STRUCT_DEF }) }]"#, - &symbols, - ); - } - - #[test] - fn goto_defenition_works_for_module_declaration() { - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod <|>foo; - //- /foo.rs - // empty - ", - ); - - let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); - assert_eq_dbg( - r#"[NavigationTarget 
{ file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, - &symbols, - ); - - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod <|>foo; - //- /foo/mod.rs - // empty - ", - ); - - let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); - assert_eq_dbg( - r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, - &symbols, - ); - } -} diff --git a/crates/ra_analysis/src/hover.rs b/crates/ra_analysis/src/hover.rs deleted file mode 100644 index 475524ee1..000000000 --- a/crates/ra_analysis/src/hover.rs +++ /dev/null @@ -1,257 +0,0 @@ -use ra_db::{Cancelable, SyntaxDatabase}; -use ra_syntax::{ - AstNode, SyntaxNode, TreePtr, - ast::{self, NameOwner}, - algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, -}; - -use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget}; - -pub(crate) fn hover( - db: &RootDatabase, - position: FilePosition, -) -> Cancelable>> { - let file = db.source_file(position.file_id); - let mut res = Vec::new(); - - let mut range = None; - if let Some(name_ref) = find_node_at_offset::(file.syntax(), position.offset) { - let navs = crate::goto_defenition::reference_defenition(db, position.file_id, name_ref)?; - for nav in navs { - res.extend(doc_text_for(db, nav)?) 
- } - if !res.is_empty() { - range = Some(name_ref.syntax().range()) - } - } - if range.is_none() { - let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| { - leaf.ancestors() - .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) - }); - let node = ctry!(node); - let frange = FileRange { - file_id: position.file_id, - range: node.range(), - }; - res.extend(type_of(db, frange)?); - range = Some(node.range()); - }; - - let range = ctry!(range); - if res.is_empty() { - return Ok(None); - } - let res = RangeInfo::new(range, res.join("\n\n---\n")); - Ok(Some(res)) -} - -pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Cancelable> { - let file = db.source_file(frange.file_id); - let syntax = file.syntax(); - let leaf_node = find_covering_node(syntax, frange.range); - // if we picked identifier, expand to pattern/expression - let node = leaf_node - .ancestors() - .take_while(|it| it.range() == leaf_node.range()) - .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some()) - .unwrap_or(leaf_node); - let parent_fn = ctry!(node.ancestors().find_map(ast::FnDef::cast)); - let function = ctry!(hir::source_binder::function_from_source( - db, - frange.file_id, - parent_fn - )?); - let infer = function.infer(db)?; - let syntax_mapping = function.body_syntax_mapping(db)?; - if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) { - Ok(Some(infer[expr].to_string())) - } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) { - Ok(Some(infer[pat].to_string())) - } else { - Ok(None) - } -} - -// FIXME: this should not really use navigation target. Rather, approximatelly -// resovled symbol should return a `DefId`. 
-fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Cancelable> { - let result = match (nav.description(db), nav.docs(db)) { - (Some(desc), Some(docs)) => Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs), - (Some(desc), None) => Some("```rust\n".to_string() + &*desc + "\n```"), - (None, Some(docs)) => Some(docs), - _ => None, - }; - - Ok(result) -} - -impl NavigationTarget { - fn node(&self, db: &RootDatabase) -> Option> { - let source_file = db.source_file(self.file_id); - let source_file = source_file.syntax(); - let node = source_file - .descendants() - .find(|node| node.kind() == self.kind && node.range() == self.range)? - .to_owned(); - Some(node) - } - - fn docs(&self, db: &RootDatabase) -> Option { - let node = self.node(db)?; - fn doc_comments(node: &N) -> Option { - let comments = node.doc_comment_text(); - if comments.is_empty() { - None - } else { - Some(comments) - } - } - - visitor() - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .accept(&node)? - } - - /// Get a description of this node. - /// - /// e.g. 
`struct Name`, `enum Name`, `fn Name` - fn description(&self, db: &RootDatabase) -> Option { - // TODO: After type inference is done, add type information to improve the output - let node = self.node(db)?; - // TODO: Refactor to be have less repetition - visitor() - .visit(|node: &ast::FnDef| { - let mut string = "fn ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::StructDef| { - let mut string = "struct ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::EnumDef| { - let mut string = "enum ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::TraitDef| { - let mut string = "trait ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::Module| { - let mut string = "mod ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::TypeDef| { - let mut string = "type ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::ConstDef| { - let mut string = "const ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .visit(|node: &ast::StaticDef| { - let mut string = "static ".to_string(); - node.name()?.syntax().text().push_to(&mut string); - Some(string) - }) - .accept(&node)? 
- } -} - -#[cfg(test)] -mod tests { - use ra_syntax::TextRange; - use crate::mock_analysis::{single_file_with_position, single_file_with_range}; - - #[test] - fn hover_shows_type_of_an_expression() { - let (analysis, position) = single_file_with_position( - " - pub fn foo() -> u32 { 1 } - - fn main() { - let foo_test = foo()<|>; - } - ", - ); - let hover = analysis.hover(position).unwrap().unwrap(); - assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into())); - assert_eq!(hover.info, "u32"); - } - - #[test] - fn hover_for_local_variable() { - let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }"); - let hover = analysis.hover(position).unwrap().unwrap(); - assert_eq!(hover.info, "i32"); - } - - #[test] - fn hover_for_local_variable_pat() { - let (analysis, position) = single_file_with_position("fn func(fo<|>o: i32) {}"); - let hover = analysis.hover(position).unwrap().unwrap(); - assert_eq!(hover.info, "i32"); - } - - #[test] - fn test_type_of_for_function() { - let (analysis, range) = single_file_with_range( - " - pub fn foo() -> u32 { 1 }; - - fn main() { - let foo_test = <|>foo()<|>; - } - ", - ); - - let type_name = analysis.type_of(range).unwrap().unwrap(); - assert_eq!("u32", &type_name); - } - - // FIXME: improve type_of to make this work - #[test] - fn test_type_of_for_expr_1() { - let (analysis, range) = single_file_with_range( - " - fn main() { - let foo = <|>1 + foo_test<|>; - } - ", - ); - - let type_name = analysis.type_of(range).unwrap().unwrap(); - assert_eq!("[unknown]", &type_name); - } - - // FIXME: improve type_of to make this work - #[test] - fn test_type_of_for_expr_2() { - let (analysis, range) = single_file_with_range( - " - fn main() { - let foo: usize = 1; - let bar = <|>1 + foo_test<|>; - } - ", - ); - - let type_name = analysis.type_of(range).unwrap().unwrap(); - assert_eq!("[unknown]", &type_name); - } - -} diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs deleted file mode 
100644 index 7c60ab7d6..000000000 --- a/crates/ra_analysis/src/imp.rs +++ /dev/null @@ -1,309 +0,0 @@ -use std::sync::Arc; - -use salsa::Database; - -use hir::{ - self, Problem, source_binder, -}; -use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; -use ra_ide_api_light::{self, assists, LocalEdit, Severity}; -use ra_syntax::{ - TextRange, AstNode, SourceFile, - ast::{self, NameOwner}, - algo::find_node_at_offset, - SyntaxKind::*, -}; - -use crate::{ - AnalysisChange, - Cancelable, NavigationTarget, - CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit, - Query, RootChange, SourceChange, SourceFileEdit, - symbol_index::{LibrarySymbolsQuery, FileSymbol}, -}; - -impl db::RootDatabase { - pub(crate) fn apply_change(&mut self, change: AnalysisChange) { - log::info!("apply_change {:?}", change); - // self.gc_syntax_trees(); - if !change.new_roots.is_empty() { - let mut local_roots = Vec::clone(&self.local_roots()); - for (root_id, is_local) in change.new_roots { - self.query_mut(ra_db::SourceRootQuery) - .set(root_id, Default::default()); - if is_local { - local_roots.push(root_id); - } - } - self.query_mut(ra_db::LocalRootsQuery) - .set((), Arc::new(local_roots)); - } - - for (root_id, root_change) in change.roots_changed { - self.apply_root_change(root_id, root_change); - } - for (file_id, text) in change.files_changed { - self.query_mut(ra_db::FileTextQuery).set(file_id, text) - } - if !change.libraries_added.is_empty() { - let mut libraries = Vec::clone(&self.library_roots()); - for library in change.libraries_added { - libraries.push(library.root_id); - self.query_mut(ra_db::SourceRootQuery) - .set(library.root_id, Default::default()); - self.query_mut(LibrarySymbolsQuery) - .set_constant(library.root_id, Arc::new(library.symbol_index)); - self.apply_root_change(library.root_id, library.root_change); - } - self.query_mut(ra_db::LibraryRootsQuery) - .set((), Arc::new(libraries)); - } - if let Some(crate_graph) = 
change.crate_graph { - self.query_mut(ra_db::CrateGraphQuery) - .set((), Arc::new(crate_graph)) - } - } - - fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { - let mut source_root = SourceRoot::clone(&self.source_root(root_id)); - for add_file in root_change.added { - self.query_mut(ra_db::FileTextQuery) - .set(add_file.file_id, add_file.text); - self.query_mut(ra_db::FileRelativePathQuery) - .set(add_file.file_id, add_file.path.clone()); - self.query_mut(ra_db::FileSourceRootQuery) - .set(add_file.file_id, root_id); - source_root.files.insert(add_file.path, add_file.file_id); - } - for remove_file in root_change.removed { - self.query_mut(ra_db::FileTextQuery) - .set(remove_file.file_id, Default::default()); - source_root.files.remove(&remove_file.path); - } - self.query_mut(ra_db::SourceRootQuery) - .set(root_id, Arc::new(source_root)); - } - - #[allow(unused)] - /// Ideally, we should call this function from time to time to collect heavy - /// syntax trees. However, if we actually do that, everything is recomputed - /// for some reason. Needs investigation. - fn gc_syntax_trees(&mut self) { - self.query(ra_db::SourceFileQuery) - .sweep(salsa::SweepStrategy::default().discard_values()); - self.query(hir::db::SourceFileItemsQuery) - .sweep(salsa::SweepStrategy::default().discard_values()); - self.query(hir::db::FileItemQuery) - .sweep(salsa::SweepStrategy::default().discard_values()); - } -} - -impl db::RootDatabase { - /// This returns `Vec` because a module may be included from several places. We - /// don't handle this case yet though, so the Vec has length at most one. - pub(crate) fn parent_module( - &self, - position: FilePosition, - ) -> Cancelable> { - let module = match source_binder::module_from_position(self, position)? { - None => return Ok(Vec::new()), - Some(it) => it, - }; - let (file_id, ast_module) = match module.declaration_source(self)? 
{ - None => return Ok(Vec::new()), - Some(it) => it, - }; - let name = ast_module.name().unwrap(); - Ok(vec![NavigationTarget { - file_id, - name: name.text().clone(), - range: name.syntax().range(), - kind: MODULE, - ptr: None, - }]) - } - /// Returns `Vec` for the same reason as `parent_module` - pub(crate) fn crate_for(&self, file_id: FileId) -> Cancelable> { - let module = match source_binder::module_from_file_id(self, file_id)? { - Some(it) => it, - None => return Ok(Vec::new()), - }; - let krate = match module.krate(self)? { - Some(it) => it, - None => return Ok(Vec::new()), - }; - Ok(vec![krate.crate_id()]) - } - pub(crate) fn find_all_refs( - &self, - position: FilePosition, - ) -> Cancelable> { - let file = self.source_file(position.file_id); - // Find the binding associated with the offset - let (binding, descr) = match find_binding(self, &file, position)? { - None => return Ok(Vec::new()), - Some(it) => it, - }; - - let mut ret = binding - .name() - .into_iter() - .map(|name| (position.file_id, name.syntax().range())) - .collect::>(); - ret.extend( - descr - .scopes(self)? 
- .find_all_refs(binding) - .into_iter() - .map(|ref_desc| (position.file_id, ref_desc.range)), - ); - - return Ok(ret); - - fn find_binding<'a>( - db: &db::RootDatabase, - source_file: &'a SourceFile, - position: FilePosition, - ) -> Cancelable> { - let syntax = source_file.syntax(); - if let Some(binding) = find_node_at_offset::(syntax, position.offset) { - let descr = ctry!(source_binder::function_from_child_node( - db, - position.file_id, - binding.syntax(), - )?); - return Ok(Some((binding, descr))); - }; - let name_ref = ctry!(find_node_at_offset::(syntax, position.offset)); - let descr = ctry!(source_binder::function_from_child_node( - db, - position.file_id, - name_ref.syntax(), - )?); - let scope = descr.scopes(db)?; - let resolved = ctry!(scope.resolve_local_name(name_ref)); - let resolved = resolved.ptr().resolve(source_file); - let binding = ctry!(find_node_at_offset::( - syntax, - resolved.range().end() - )); - Ok(Some((binding, descr))) - } - } - - pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable> { - let syntax = self.source_file(file_id); - - let mut res = ra_ide_api_light::diagnostics(&syntax) - .into_iter() - .map(|d| Diagnostic { - range: d.range, - message: d.msg, - severity: d.severity, - fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)), - }) - .collect::>(); - if let Some(m) = source_binder::module_from_file_id(self, file_id)? { - for (name_node, problem) in m.problems(self)? 
{ - let source_root = self.file_source_root(file_id); - let diag = match problem { - Problem::UnresolvedModule { candidate } => { - let create_file = FileSystemEdit::CreateFile { - source_root, - path: candidate.clone(), - }; - let fix = SourceChange { - label: "create module".to_string(), - source_file_edits: Vec::new(), - file_system_edits: vec![create_file], - cursor_position: None, - }; - Diagnostic { - range: name_node.range(), - message: "unresolved module".to_string(), - severity: Severity::Error, - fix: Some(fix), - } - } - Problem::NotDirOwner { move_to, candidate } => { - let move_file = FileSystemEdit::MoveFile { - src: file_id, - dst_source_root: source_root, - dst_path: move_to.clone(), - }; - let create_file = FileSystemEdit::CreateFile { - source_root, - path: move_to.join(candidate), - }; - let fix = SourceChange { - label: "move file and create module".to_string(), - source_file_edits: Vec::new(), - file_system_edits: vec![move_file, create_file], - cursor_position: None, - }; - Diagnostic { - range: name_node.range(), - message: "can't declare module at this location".to_string(), - severity: Severity::Error, - fix: Some(fix), - } - } - }; - res.push(diag) - } - }; - Ok(res) - } - - pub(crate) fn assists(&self, frange: FileRange) -> Vec { - let file = self.source_file(frange.file_id); - assists::assists(&file, frange.range) - .into_iter() - .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) - .collect() - } - - pub(crate) fn rename( - &self, - position: FilePosition, - new_name: &str, - ) -> Cancelable> { - let res = self - .find_all_refs(position)? 
- .iter() - .map(|(file_id, text_range)| SourceFileEdit { - file_id: *file_id, - edit: { - let mut builder = ra_text_edit::TextEditBuilder::default(); - builder.replace(*text_range, new_name.into()); - builder.finish() - }, - }) - .collect::>(); - Ok(res) - } - pub(crate) fn index_resolve(&self, name_ref: &ast::NameRef) -> Cancelable> { - let name = name_ref.text(); - let mut query = Query::new(name.to_string()); - query.exact(); - query.limit(4); - crate::symbol_index::world_symbols(self, query) - } -} - -impl SourceChange { - pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange { - let file_edit = SourceFileEdit { - file_id, - edit: edit.edit, - }; - SourceChange { - label: edit.label, - source_file_edits: vec![file_edit], - file_system_edits: vec![], - cursor_position: edit - .cursor_position - .map(|offset| FilePosition { offset, file_id }), - } - } -} diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs deleted file mode 100644 index 183e36706..000000000 --- a/crates/ra_analysis/src/lib.rs +++ /dev/null @@ -1,509 +0,0 @@ -//! ra_analyzer crate provides "ide-centric" APIs for the rust-analyzer. What -//! powers this API are the `RootDatabase` struct, which defines a `salsa` -//! database, and the `ra_hir` crate, where majority of the analysis happens. -//! However, IDE specific bits of the analysis (most notably completion) happen -//! in this crate. -macro_rules! 
ctry { - ($expr:expr) => { - match $expr { - None => return Ok(None), - Some(it) => it, - } - }; -} - -mod completion; -mod db; -mod goto_defenition; -mod imp; -pub mod mock_analysis; -mod runnables; -mod symbol_index; - -mod extend_selection; -mod hover; -mod call_info; -mod syntax_highlighting; - -use std::{fmt, sync::Arc}; - -use ra_syntax::{SmolStr, SourceFile, TreePtr, SyntaxKind, TextRange, TextUnit}; -use ra_text_edit::TextEdit; -use ra_db::{SyntaxDatabase, FilesDatabase, LocalSyntaxPtr}; -use rayon::prelude::*; -use relative_path::RelativePathBuf; -use rustc_hash::FxHashMap; -use salsa::ParallelDatabase; - -use crate::{ - symbol_index::{FileSymbol, SymbolIndex}, - db::LineIndexDatabase, -}; - -pub use crate::{ - completion::{CompletionItem, CompletionItemKind, InsertText}, - runnables::{Runnable, RunnableKind}, -}; -pub use ra_ide_api_light::{ - Fold, FoldKind, HighlightedRange, Severity, StructureNode, - LineIndex, LineCol, translate_offset_with_edit, -}; -pub use ra_db::{ - Cancelable, Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId -}; - -#[derive(Default)] -pub struct AnalysisChange { - new_roots: Vec<(SourceRootId, bool)>, - roots_changed: FxHashMap, - files_changed: Vec<(FileId, Arc)>, - libraries_added: Vec, - crate_graph: Option, -} - -#[derive(Default)] -struct RootChange { - added: Vec, - removed: Vec, -} - -#[derive(Debug)] -struct AddFile { - file_id: FileId, - path: RelativePathBuf, - text: Arc, -} - -#[derive(Debug)] -struct RemoveFile { - file_id: FileId, - path: RelativePathBuf, -} - -impl fmt::Debug for AnalysisChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut d = fmt.debug_struct("AnalysisChange"); - if !self.new_roots.is_empty() { - d.field("new_roots", &self.new_roots); - } - if !self.roots_changed.is_empty() { - d.field("roots_changed", &self.roots_changed); - } - if !self.files_changed.is_empty() { - d.field("files_changed", &self.files_changed.len()); - } - if 
!self.libraries_added.is_empty() { - d.field("libraries_added", &self.libraries_added.len()); - } - if !self.crate_graph.is_some() { - d.field("crate_graph", &self.crate_graph); - } - d.finish() - } -} - -impl fmt::Debug for RootChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("AnalysisChange") - .field("added", &self.added.len()) - .field("removed", &self.removed.len()) - .finish() - } -} - -impl AnalysisChange { - pub fn new() -> AnalysisChange { - AnalysisChange::default() - } - pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { - self.new_roots.push((root_id, is_local)); - } - pub fn add_file( - &mut self, - root_id: SourceRootId, - file_id: FileId, - path: RelativePathBuf, - text: Arc, - ) { - let file = AddFile { - file_id, - path, - text, - }; - self.roots_changed - .entry(root_id) - .or_default() - .added - .push(file); - } - pub fn change_file(&mut self, file_id: FileId, new_text: Arc) { - self.files_changed.push((file_id, new_text)) - } - pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { - let file = RemoveFile { file_id, path }; - self.roots_changed - .entry(root_id) - .or_default() - .removed - .push(file); - } - pub fn add_library(&mut self, data: LibraryData) { - self.libraries_added.push(data) - } - pub fn set_crate_graph(&mut self, graph: CrateGraph) { - self.crate_graph = Some(graph); - } -} - -#[derive(Debug)] -pub struct SourceChange { - pub label: String, - pub source_file_edits: Vec, - pub file_system_edits: Vec, - pub cursor_position: Option, -} - -#[derive(Debug)] -pub struct SourceFileEdit { - pub file_id: FileId, - pub edit: TextEdit, -} - -#[derive(Debug)] -pub enum FileSystemEdit { - CreateFile { - source_root: SourceRootId, - path: RelativePathBuf, - }, - MoveFile { - src: FileId, - dst_source_root: SourceRootId, - dst_path: RelativePathBuf, - }, -} - -#[derive(Debug)] -pub struct Diagnostic { - pub message: String, - pub range: TextRange, 
- pub fix: Option, - pub severity: Severity, -} - -#[derive(Debug)] -pub struct Query { - query: String, - lowercased: String, - only_types: bool, - libs: bool, - exact: bool, - limit: usize, -} - -impl Query { - pub fn new(query: String) -> Query { - let lowercased = query.to_lowercase(); - Query { - query, - lowercased, - only_types: false, - libs: false, - exact: false, - limit: usize::max_value(), - } - } - pub fn only_types(&mut self) { - self.only_types = true; - } - pub fn libs(&mut self) { - self.libs = true; - } - pub fn exact(&mut self) { - self.exact = true; - } - pub fn limit(&mut self, limit: usize) { - self.limit = limit - } -} - -/// `NavigationTarget` represents and element in the editor's UI whihc you can -/// click on to navigate to a particular piece of code. -/// -/// Typically, a `NavigationTarget` corresponds to some element in the source -/// code, like a function or a struct, but this is not strictly required. -#[derive(Debug, Clone)] -pub struct NavigationTarget { - file_id: FileId, - name: SmolStr, - kind: SyntaxKind, - range: TextRange, - // Should be DefId ideally - ptr: Option, -} - -impl NavigationTarget { - fn from_symbol(symbol: FileSymbol) -> NavigationTarget { - NavigationTarget { - file_id: symbol.file_id, - name: symbol.name.clone(), - kind: symbol.ptr.kind(), - range: symbol.ptr.range(), - ptr: Some(symbol.ptr.clone()), - } - } - pub fn name(&self) -> &SmolStr { - &self.name - } - pub fn kind(&self) -> SyntaxKind { - self.kind - } - pub fn file_id(&self) -> FileId { - self.file_id - } - pub fn range(&self) -> TextRange { - self.range - } -} - -#[derive(Debug)] -pub struct RangeInfo { - pub range: TextRange, - pub info: T, -} - -impl RangeInfo { - fn new(range: TextRange, info: T) -> RangeInfo { - RangeInfo { range, info } - } -} - -#[derive(Debug)] -pub struct CallInfo { - pub label: String, - pub doc: Option, - pub parameters: Vec, - pub active_parameter: Option, -} - -/// `AnalysisHost` stores the current state of the world. 
-#[derive(Debug, Default)] -pub struct AnalysisHost { - db: db::RootDatabase, -} - -impl AnalysisHost { - /// Returns a snapshot of the current state, which you can query for - /// semantic information. - pub fn analysis(&self) -> Analysis { - Analysis { - db: self.db.snapshot(), - } - } - /// Applies changes to the current state of the world. If there are - /// outstanding snapshots, they will be canceled. - pub fn apply_change(&mut self, change: AnalysisChange) { - self.db.apply_change(change) - } -} - -/// Analysis is a snapshot of a world state at a moment in time. It is the main -/// entry point for asking semantic information about the world. When the world -/// state is advanced using `AnalysisHost::apply_change` method, all existing -/// `Analysis` are canceled (most method return `Err(Canceled)`). -#[derive(Debug)] -pub struct Analysis { - db: salsa::Snapshot, -} - -impl Analysis { - /// Gets the text of the source file. - pub fn file_text(&self, file_id: FileId) -> Arc { - self.db.file_text(file_id) - } - /// Gets the syntax tree of the file. - pub fn file_syntax(&self, file_id: FileId) -> TreePtr { - self.db.source_file(file_id).clone() - } - /// Gets the file's `LineIndex`: data structure to convert between absolute - /// offsets and line/column representation. - pub fn file_line_index(&self, file_id: FileId) -> Arc { - self.db.line_index(file_id) - } - /// Selects the next syntactic nodes encopasing the range. - pub fn extend_selection(&self, frange: FileRange) -> TextRange { - extend_selection::extend_selection(&self.db, frange) - } - /// Returns position of the mathcing brace (all types of braces are - /// supported). - pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option { - ra_ide_api_light::matching_brace(file, offset) - } - /// Returns a syntax tree represented as `String`, for debug purposes. - // FIXME: use a better name here. 
- pub fn syntax_tree(&self, file_id: FileId) -> String { - let file = self.db.source_file(file_id); - ra_ide_api_light::syntax_tree(&file) - } - /// Returns an edit to remove all newlines in the range, cleaning up minor - /// stuff like trailing commas. - pub fn join_lines(&self, frange: FileRange) -> SourceChange { - let file = self.db.source_file(frange.file_id); - SourceChange::from_local_edit( - frange.file_id, - ra_ide_api_light::join_lines(&file, frange.range), - ) - } - /// Returns an edit which should be applied when opening a new line, fixing - /// up minor stuff like continuing the comment. - pub fn on_enter(&self, position: FilePosition) -> Option { - let file = self.db.source_file(position.file_id); - let edit = ra_ide_api_light::on_enter(&file, position.offset)?; - Some(SourceChange::from_local_edit(position.file_id, edit)) - } - /// Returns an edit which should be applied after `=` was typed. Primarily, - /// this works when adding `let =`. - // FIXME: use a snippet completion instead of this hack here. - pub fn on_eq_typed(&self, position: FilePosition) -> Option { - let file = self.db.source_file(position.file_id); - let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; - Some(SourceChange::from_local_edit(position.file_id, edit)) - } - /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. - pub fn on_dot_typed(&self, position: FilePosition) -> Option { - let file = self.db.source_file(position.file_id); - let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; - Some(SourceChange::from_local_edit(position.file_id, edit)) - } - /// Returns a tree representation of symbols in the file. Useful to draw a - /// file outline. - pub fn file_structure(&self, file_id: FileId) -> Vec { - let file = self.db.source_file(file_id); - ra_ide_api_light::file_structure(&file) - } - /// Returns the set of folding ranges. 
- pub fn folding_ranges(&self, file_id: FileId) -> Vec { - let file = self.db.source_file(file_id); - ra_ide_api_light::folding_ranges(&file) - } - /// Fuzzy searches for a symbol. - pub fn symbol_search(&self, query: Query) -> Cancelable> { - let res = symbol_index::world_symbols(&*self.db, query)? - .into_iter() - .map(NavigationTarget::from_symbol) - .collect(); - Ok(res) - } - pub fn goto_defenition( - &self, - position: FilePosition, - ) -> Cancelable>> { - goto_defenition::goto_defenition(&*self.db, position) - } - /// Finds all usages of the reference at point. - pub fn find_all_refs(&self, position: FilePosition) -> Cancelable> { - self.db.find_all_refs(position) - } - /// Returns a short text descrbing element at position. - pub fn hover(&self, position: FilePosition) -> Cancelable>> { - hover::hover(&*self.db, position) - } - /// Computes parameter information for the given call expression. - pub fn call_info(&self, position: FilePosition) -> Cancelable> { - call_info::call_info(&*self.db, position) - } - /// Returns a `mod name;` declaration which created the current module. - pub fn parent_module(&self, position: FilePosition) -> Cancelable> { - self.db.parent_module(position) - } - /// Returns crates this file belongs too. - pub fn crate_for(&self, file_id: FileId) -> Cancelable> { - self.db.crate_for(file_id) - } - /// Returns the root file of the given crate. - pub fn crate_root(&self, crate_id: CrateId) -> Cancelable { - Ok(self.db.crate_graph().crate_root(crate_id)) - } - /// Returns the set of possible targets to run for the current file. - pub fn runnables(&self, file_id: FileId) -> Cancelable> { - runnables::runnables(&*self.db, file_id) - } - /// Computes syntax highlighting for the given file. - pub fn highlight(&self, file_id: FileId) -> Cancelable> { - syntax_highlighting::highlight(&*self.db, file_id) - } - /// Computes completions at the given position. 
- pub fn completions(&self, position: FilePosition) -> Cancelable>> { - let completions = completion::completions(&self.db, position)?; - Ok(completions.map(|it| it.into())) - } - /// Computes assists (aks code actons aka intentions) for the given - /// position. - pub fn assists(&self, frange: FileRange) -> Cancelable> { - Ok(self.db.assists(frange)) - } - /// Computes the set of diagnostics for the given file. - pub fn diagnostics(&self, file_id: FileId) -> Cancelable> { - self.db.diagnostics(file_id) - } - /// Computes the type of the expression at the given position. - pub fn type_of(&self, frange: FileRange) -> Cancelable> { - hover::type_of(&*self.db, frange) - } - /// Returns the edit required to rename reference at the position to the new - /// name. - pub fn rename( - &self, - position: FilePosition, - new_name: &str, - ) -> Cancelable> { - self.db.rename(position, new_name) - } -} - -pub struct LibraryData { - root_id: SourceRootId, - root_change: RootChange, - symbol_index: SymbolIndex, -} - -impl fmt::Debug for LibraryData { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("LibraryData") - .field("root_id", &self.root_id) - .field("root_change", &self.root_change) - .field("n_symbols", &self.symbol_index.len()) - .finish() - } -} - -impl LibraryData { - pub fn prepare( - root_id: SourceRootId, - files: Vec<(FileId, RelativePathBuf, Arc)>, - ) -> LibraryData { - let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { - let file = SourceFile::parse(text); - (*file_id, file) - })); - let mut root_change = RootChange::default(); - root_change.added = files - .into_iter() - .map(|(file_id, path, text)| AddFile { - file_id, - path, - text, - }) - .collect(); - LibraryData { - root_id, - root_change, - symbol_index, - } - } -} - -#[test] -fn analysis_is_send() { - fn is_send() {} - is_send::(); -} diff --git a/crates/ra_analysis/src/mock_analysis.rs b/crates/ra_analysis/src/mock_analysis.rs deleted file 
mode 100644 index 846c76cfe..000000000 --- a/crates/ra_analysis/src/mock_analysis.rs +++ /dev/null @@ -1,135 +0,0 @@ -use std::sync::Arc; - -use relative_path::RelativePathBuf; -use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; -use ra_db::mock::FileMap; - -use crate::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FilePosition, FileRange, SourceRootId}; - -/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis -/// from a set of in-memory files. -#[derive(Debug, Default)] -pub struct MockAnalysis { - files: Vec<(String, String)>, -} - -impl MockAnalysis { - pub fn new() -> MockAnalysis { - MockAnalysis::default() - } - /// Creates `MockAnalysis` using a fixture data in the following format: - /// - /// ```notrust - /// //- /main.rs - /// mod foo; - /// fn main() {} - /// - /// //- /foo.rs - /// struct Baz; - /// ``` - pub fn with_files(fixture: &str) -> MockAnalysis { - let mut res = MockAnalysis::new(); - for entry in parse_fixture(fixture) { - res.add_file(&entry.meta, &entry.text); - } - res - } - - /// Same as `with_files`, but requires that a single file contains a `<|>` marker, - /// whose position is also returned. 
- pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { - let mut position = None; - let mut res = MockAnalysis::new(); - for entry in parse_fixture(fixture) { - if entry.text.contains(CURSOR_MARKER) { - assert!( - position.is_none(), - "only one marker (<|>) per fixture is allowed" - ); - position = Some(res.add_file_with_position(&entry.meta, &entry.text)); - } else { - res.add_file(&entry.meta, &entry.text); - } - } - let position = position.expect("expected a marker (<|>)"); - (res, position) - } - - pub fn add_file(&mut self, path: &str, text: &str) -> FileId { - let file_id = FileId((self.files.len() + 1) as u32); - self.files.push((path.to_string(), text.to_string())); - file_id - } - pub fn add_file_with_position(&mut self, path: &str, text: &str) -> FilePosition { - let (offset, text) = extract_offset(text); - let file_id = FileId((self.files.len() + 1) as u32); - self.files.push((path.to_string(), text.to_string())); - FilePosition { file_id, offset } - } - pub fn add_file_with_range(&mut self, path: &str, text: &str) -> FileRange { - let (range, text) = extract_range(text); - let file_id = FileId((self.files.len() + 1) as u32); - self.files.push((path.to_string(), text.to_string())); - FileRange { file_id, range } - } - pub fn id_of(&self, path: &str) -> FileId { - let (idx, _) = self - .files - .iter() - .enumerate() - .find(|(_, (p, _text))| path == p) - .expect("no file in this mock"); - FileId(idx as u32 + 1) - } - pub fn analysis_host(self) -> AnalysisHost { - let mut host = AnalysisHost::default(); - let mut file_map = FileMap::default(); - let source_root = SourceRootId(0); - let mut change = AnalysisChange::new(); - change.add_root(source_root, true); - let mut crate_graph = CrateGraph::default(); - for (path, contents) in self.files.into_iter() { - assert!(path.starts_with('/')); - let path = RelativePathBuf::from_path(&path[1..]).unwrap(); - let file_id = file_map.add(path.clone()); - if path == "/lib.rs" || path == 
"/main.rs" { - crate_graph.add_crate_root(file_id); - } - change.add_file(source_root, file_id, path, Arc::new(contents)); - } - change.set_crate_graph(crate_graph); - // change.set_file_resolver(Arc::new(file_map)); - host.apply_change(change); - host - } - pub fn analysis(self) -> Analysis { - self.analysis_host().analysis() - } -} - -/// Creates analysis from a multi-file fixture, returns positions marked with <|>. -pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) { - let (mock, position) = MockAnalysis::with_files_and_position(fixture); - (mock.analysis(), position) -} - -/// Creates analysis for a single file. -pub fn single_file(code: &str) -> (Analysis, FileId) { - let mut mock = MockAnalysis::new(); - let file_id = mock.add_file("/main.rs", code); - (mock.analysis(), file_id) -} - -/// Creates analysis for a single file, returns position marked with <|>. -pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) { - let mut mock = MockAnalysis::new(); - let pos = mock.add_file_with_position("/main.rs", code); - (mock.analysis(), pos) -} - -/// Creates analysis for a single file, returns range marked with a pair of <|>. 
-pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) { - let mut mock = MockAnalysis::new(); - let pos = mock.add_file_with_range("/main.rs", code); - (mock.analysis(), pos) -} diff --git a/crates/ra_analysis/src/runnables.rs b/crates/ra_analysis/src/runnables.rs deleted file mode 100644 index 98b1d2d55..000000000 --- a/crates/ra_analysis/src/runnables.rs +++ /dev/null @@ -1,89 +0,0 @@ -use itertools::Itertools; -use ra_syntax::{ - TextRange, SyntaxNode, - ast::{self, AstNode, NameOwner, ModuleItemOwner}, -}; -use ra_db::{Cancelable, SyntaxDatabase}; - -use crate::{db::RootDatabase, FileId}; - -#[derive(Debug)] -pub struct Runnable { - pub range: TextRange, - pub kind: RunnableKind, -} - -#[derive(Debug)] -pub enum RunnableKind { - Test { name: String }, - TestMod { path: String }, - Bin, -} - -pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable> { - let source_file = db.source_file(file_id); - let res = source_file - .syntax() - .descendants() - .filter_map(|i| runnable(db, file_id, i)) - .collect(); - Ok(res) -} - -fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option { - if let Some(fn_def) = ast::FnDef::cast(item) { - runnable_fn(fn_def) - } else if let Some(m) = ast::Module::cast(item) { - runnable_mod(db, file_id, m) - } else { - None - } -} - -fn runnable_fn(fn_def: &ast::FnDef) -> Option { - let name = fn_def.name()?.text(); - let kind = if name == "main" { - RunnableKind::Bin - } else if fn_def.has_atom_attr("test") { - RunnableKind::Test { - name: name.to_string(), - } - } else { - return None; - }; - Some(Runnable { - range: fn_def.syntax().range(), - kind, - }) -} - -fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option { - let has_test_function = module - .item_list()? 
- .items() - .filter_map(|it| match it.kind() { - ast::ModuleItemKind::FnDef(it) => Some(it), - _ => None, - }) - .any(|f| f.has_atom_attr("test")); - if !has_test_function { - return None; - } - let range = module.syntax().range(); - let module = - hir::source_binder::module_from_child_node(db, file_id, module.syntax()).ok()??; - - // FIXME: thread cancellation instead of `.ok`ing - let path = module - .path_to_root(db) - .ok()? - .into_iter() - .rev() - .filter_map(|it| it.name(db).ok()) - .filter_map(|it| it) - .join("::"); - Some(Runnable { - range, - kind: RunnableKind::TestMod { path }, - }) -} diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs deleted file mode 100644 index 8dd15b40e..000000000 --- a/crates/ra_analysis/src/symbol_index.rs +++ /dev/null @@ -1,222 +0,0 @@ -//! This module handles fuzzy-searching of functions, structs and other symbols -//! by name across the whole workspace and dependencies. -//! -//! It works by building an incrementally-updated text-search index of all -//! symbols. The backbone of the index is the **awesome** `fst` crate by -//! @BurntSushi. -//! -//! In a nutshell, you give a set of strings to the `fst`, and it builds a -//! finite state machine describing this set of strtings. The strings which -//! could fuzzy-match a pattern can also be described by a finite state machine. -//! What is freakingly cool is that you can now traverse both state machines in -//! lock-step to enumerate the strings which are both in the input set and -//! fuzz-match the query. Or, more formally, given two langauges described by -//! fsts, one can build an product fst which describes the intersection of the -//! languages. -//! -//! `fst` does not support cheap updating of the index, but it supports unioning -//! of state machines. So, to account for changing source code, we build an fst -//! for each library (which is assumed to never change) and an fst for each rust -//! 
file in the current workspace, and run a query aginst the union of all -//! thouse fsts. -use std::{ - cmp::Ordering, - hash::{Hash, Hasher}, - sync::Arc, -}; - -use fst::{self, Streamer}; -use ra_syntax::{ - SyntaxNode, SourceFile, SmolStr, TreePtr, AstNode, - algo::{visit::{visitor, Visitor}, find_covering_node}, - SyntaxKind::{self, *}, - ast::{self, NameOwner}, -}; -use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr}; -use salsa::ParallelDatabase; -use rayon::prelude::*; - -use crate::{ - Cancelable, FileId, Query, - db::RootDatabase, -}; - -salsa::query_group! { - pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { - fn file_symbols(file_id: FileId) -> Cancelable> { - type FileSymbolsQuery; - } - fn library_symbols(id: SourceRootId) -> Arc { - type LibrarySymbolsQuery; - storage input; - } - } -} - -fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable> { - db.check_canceled()?; - let source_file = db.source_file(file_id); - let mut symbols = source_file - .syntax() - .descendants() - .filter_map(to_symbol) - .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) - .collect::>(); - - for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? 
{ - let node = find_covering_node(source_file.syntax(), text_range); - let ptr = LocalSyntaxPtr::new(node); - symbols.push(FileSymbol { file_id, name, ptr }) - } - - Ok(Arc::new(SymbolIndex::new(symbols))) -} - -pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable> { - /// Need to wrap Snapshot to provide `Clone` impl for `map_with` - struct Snap(salsa::Snapshot); - impl Clone for Snap { - fn clone(&self) -> Snap { - Snap(self.0.snapshot()) - } - } - - let buf: Vec> = if query.libs { - let snap = Snap(db.snapshot()); - db.library_roots() - .par_iter() - .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) - .collect() - } else { - let mut files = Vec::new(); - for &root in db.local_roots().iter() { - let sr = db.source_root(root); - files.extend(sr.files.values().map(|&it| it)) - } - - let snap = Snap(db.snapshot()); - files - .par_iter() - .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) - .filter_map(|it| it.ok()) - .collect() - }; - Ok(query.search(&buf)) -} - -#[derive(Default, Debug)] -pub(crate) struct SymbolIndex { - symbols: Vec, - map: fst::Map, -} - -impl PartialEq for SymbolIndex { - fn eq(&self, other: &SymbolIndex) -> bool { - self.symbols == other.symbols - } -} - -impl Eq for SymbolIndex {} - -impl Hash for SymbolIndex { - fn hash(&self, hasher: &mut H) { - self.symbols.hash(hasher) - } -} - -impl SymbolIndex { - fn new(mut symbols: Vec) -> SymbolIndex { - fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering { - unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str())) - } - symbols.par_sort_by(cmp); - symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal); - let names = symbols.iter().map(|it| it.name.as_str().to_lowercase()); - let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap(); - SymbolIndex { symbols, map } - } - - pub(crate) fn len(&self) -> usize { - self.symbols.len() - } - - pub(crate) fn for_files( - files: impl ParallelIterator)>, - ) -> SymbolIndex { - let 
symbols = files - .flat_map(|(file_id, file)| { - file.syntax() - .descendants() - .filter_map(to_symbol) - .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) - .collect::>() - }) - .collect::>(); - SymbolIndex::new(symbols) - } -} - -impl Query { - pub(crate) fn search(self, indices: &[Arc]) -> Vec { - let mut op = fst::map::OpBuilder::new(); - for file_symbols in indices.iter() { - let automaton = fst::automaton::Subsequence::new(&self.lowercased); - op = op.add(file_symbols.map.search(automaton)) - } - let mut stream = op.union(); - let mut res = Vec::new(); - while let Some((_, indexed_values)) = stream.next() { - if res.len() >= self.limit { - break; - } - for indexed_value in indexed_values { - let file_symbols = &indices[indexed_value.index]; - let idx = indexed_value.value as usize; - - let symbol = &file_symbols.symbols[idx]; - if self.only_types && !is_type(symbol.ptr.kind()) { - continue; - } - if self.exact && symbol.name != self.query { - continue; - } - res.push(symbol.clone()); - } - } - res - } -} - -fn is_type(kind: SyntaxKind) -> bool { - match kind { - STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true, - _ => false, - } -} - -/// The actual data that is stored in the index. It should be as compact as -/// possible. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub(crate) struct FileSymbol { - pub(crate) file_id: FileId, - pub(crate) name: SmolStr, - pub(crate) ptr: LocalSyntaxPtr, -} - -fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, LocalSyntaxPtr)> { - fn decl(node: &N) -> Option<(SmolStr, LocalSyntaxPtr)> { - let name = node.name()?.text().clone(); - let ptr = LocalSyntaxPtr::new(node.syntax()); - Some((name, ptr)) - } - visitor() - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .visit(decl::) - .accept(node)? 
-} diff --git a/crates/ra_analysis/src/syntax_highlighting.rs b/crates/ra_analysis/src/syntax_highlighting.rs deleted file mode 100644 index cb19e9515..000000000 --- a/crates/ra_analysis/src/syntax_highlighting.rs +++ /dev/null @@ -1,92 +0,0 @@ -use ra_syntax::{ast, AstNode,}; -use ra_db::SyntaxDatabase; - -use crate::{ - FileId, Cancelable, HighlightedRange, - db::RootDatabase, -}; - -pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable> { - let source_file = db.source_file(file_id); - let mut res = ra_ide_api_light::highlight(source_file.syntax()); - for macro_call in source_file - .syntax() - .descendants() - .filter_map(ast::MacroCall::cast) - { - if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { - let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax()) - .into_iter() - .filter_map(|r| { - let mapped_range = exp.map_range_back(r.range)?; - let res = HighlightedRange { - range: mapped_range + off, - tag: r.tag, - }; - Some(res) - }); - res.extend(mapped_ranges); - } - } - Ok(res) -} - -#[cfg(test)] -mod tests { - use crate::mock_analysis::single_file; - use test_utils::assert_eq_dbg; - - #[test] - fn highlights_code_inside_macros() { - let (analysis, file_id) = single_file( - " - fn main() { - ctry!({ let x = 92; x}); - vec![{ let x = 92; x}]; - } - ", - ); - let highlights = analysis.highlight(file_id).unwrap(); - assert_eq_dbg( - r#"[HighlightedRange { range: [13; 15), tag: "keyword" }, - HighlightedRange { range: [16; 20), tag: "function" }, - HighlightedRange { range: [41; 46), tag: "macro" }, - HighlightedRange { range: [49; 52), tag: "keyword" }, - HighlightedRange { range: [57; 59), tag: "literal" }, - HighlightedRange { range: [82; 86), tag: "macro" }, - HighlightedRange { range: [89; 92), tag: "keyword" }, - HighlightedRange { range: [97; 99), tag: "literal" }, - HighlightedRange { range: [49; 52), tag: "keyword" }, - HighlightedRange { range: [53; 54), tag: "function" }, - HighlightedRange { range: [57; 59), 
tag: "literal" }, - HighlightedRange { range: [61; 62), tag: "text" }, - HighlightedRange { range: [89; 92), tag: "keyword" }, - HighlightedRange { range: [93; 94), tag: "function" }, - HighlightedRange { range: [97; 99), tag: "literal" }, - HighlightedRange { range: [101; 102), tag: "text" }]"#, - &highlights, - ) - } - - // FIXME: this test is not really necessary: artifact of the inital hacky - // macros implementation. - #[test] - fn highlight_query_group_macro() { - let (analysis, file_id) = single_file( - " - salsa::query_group! { - pub trait HirDatabase: SyntaxDatabase {} - } - ", - ); - let highlights = analysis.highlight(file_id).unwrap(); - assert_eq_dbg( - r#"[HighlightedRange { range: [20; 32), tag: "macro" }, - HighlightedRange { range: [13; 18), tag: "text" }, - HighlightedRange { range: [51; 54), tag: "keyword" }, - HighlightedRange { range: [55; 60), tag: "keyword" }, - HighlightedRange { range: [61; 72), tag: "function" }]"#, - &highlights, - ) - } -} diff --git a/crates/ra_analysis/tests/test/main.rs b/crates/ra_analysis/tests/test/main.rs deleted file mode 100644 index 2c0735cb5..000000000 --- a/crates/ra_analysis/tests/test/main.rs +++ /dev/null @@ -1,249 +0,0 @@ -mod runnables; - -use ra_syntax::TextRange; -use test_utils::{assert_eq_dbg, assert_eq_text}; - -use ra_analysis::{ - mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis}, - AnalysisChange, CrateGraph, FileId, Query -}; - -#[test] -fn test_unresolved_module_diagnostic() { - let (analysis, file_id) = single_file("mod foo;"); - let diagnostics = analysis.diagnostics(file_id).unwrap(); - assert_eq_dbg( - r#"[Diagnostic { - message: "unresolved module", - range: [4; 7), - fix: Some(SourceChange { - label: "create module", - source_file_edits: [], - file_system_edits: [CreateFile { source_root: SourceRootId(0), path: "foo.rs" }], - cursor_position: None }), - severity: Error }]"#, - &diagnostics, - ); -} - -// FIXME: move this test to hir -#[test] -fn 
test_unresolved_module_diagnostic_no_diag_for_inline_mode() { - let (analysis, file_id) = single_file("mod foo {}"); - let diagnostics = analysis.diagnostics(file_id).unwrap(); - assert_eq_dbg(r#"[]"#, &diagnostics); -} - -#[test] -fn test_resolve_parent_module() { - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod foo; - //- /foo.rs - <|>// empty - ", - ); - let symbols = analysis.parent_module(pos).unwrap(); - assert_eq_dbg( - r#"[NavigationTarget { file_id: FileId(1), name: "foo", kind: MODULE, range: [4; 7), ptr: None }]"#, - &symbols, - ); -} - -#[test] -fn test_resolve_parent_module_for_inline() { - let (analysis, pos) = analysis_and_position( - " - //- /lib.rs - mod foo { - mod bar { - mod baz { <|> } - } - } - ", - ); - let symbols = analysis.parent_module(pos).unwrap(); - assert_eq_dbg( - r#"[NavigationTarget { file_id: FileId(1), name: "baz", kind: MODULE, range: [36; 39), ptr: None }]"#, - &symbols, - ); -} - -#[test] -fn test_resolve_crate_root() { - let mock = MockAnalysis::with_files( - " - //- /bar.rs - mod foo; - //- /bar/foo.rs - // emtpy <|> - ", - ); - let root_file = mock.id_of("/bar.rs"); - let mod_file = mock.id_of("/bar/foo.rs"); - let mut host = mock.analysis_host(); - assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); - - let mut crate_graph = CrateGraph::default(); - let crate_id = crate_graph.add_crate_root(root_file); - let mut change = AnalysisChange::new(); - change.set_crate_graph(crate_graph); - host.apply_change(change); - - assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); -} - -fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> { - let (analysis, position) = single_file_with_position(text); - analysis.find_all_refs(position).unwrap() -} - -#[test] -fn test_find_all_refs_for_local() { - let code = r#" - fn main() { - let mut i = 1; - let j = 1; - i = i<|> + j; - - { - i = 0; - } - - i = 5; - }"#; - - let refs = get_all_refs(code); - assert_eq!(refs.len(), 5); -} - 
-#[test] -fn test_find_all_refs_for_param_inside() { - let code = r#" - fn foo(i : u32) -> u32 { - i<|> - }"#; - - let refs = get_all_refs(code); - assert_eq!(refs.len(), 2); -} - -#[test] -fn test_find_all_refs_for_fn_param() { - let code = r#" - fn foo(i<|> : u32) -> u32 { - i - }"#; - - let refs = get_all_refs(code); - assert_eq!(refs.len(), 2); -} -#[test] -fn test_rename_for_local() { - test_rename( - r#" - fn main() { - let mut i = 1; - let j = 1; - i = i<|> + j; - - { - i = 0; - } - - i = 5; - }"#, - "k", - r#" - fn main() { - let mut k = 1; - let j = 1; - k = k + j; - - { - k = 0; - } - - k = 5; - }"#, - ); -} - -#[test] -fn test_rename_for_param_inside() { - test_rename( - r#" - fn foo(i : u32) -> u32 { - i<|> - }"#, - "j", - r#" - fn foo(j : u32) -> u32 { - j - }"#, - ); -} - -#[test] -fn test_rename_refs_for_fn_param() { - test_rename( - r#" - fn foo(i<|> : u32) -> u32 { - i - }"#, - "new_name", - r#" - fn foo(new_name : u32) -> u32 { - new_name - }"#, - ); -} - -#[test] -fn test_rename_for_mut_param() { - test_rename( - r#" - fn foo(mut i<|> : u32) -> u32 { - i - }"#, - "new_name", - r#" - fn foo(mut new_name : u32) -> u32 { - new_name - }"#, - ); -} - -fn test_rename(text: &str, new_name: &str, expected: &str) { - let (analysis, position) = single_file_with_position(text); - let edits = analysis.rename(position, new_name).unwrap(); - let mut text_edit_bulder = ra_text_edit::TextEditBuilder::default(); - let mut file_id: Option = None; - for edit in edits { - file_id = Some(edit.file_id); - for atom in edit.edit.as_atoms() { - text_edit_bulder.replace(atom.delete, atom.insert.clone()); - } - } - let result = text_edit_bulder - .finish() - .apply(&*analysis.file_text(file_id.unwrap())); - assert_eq_text!(expected, &*result); -} - -#[test] -fn world_symbols_include_stuff_from_macros() { - let (analysis, _) = single_file( - " -salsa::query_group! 
{ -pub trait HirDatabase: SyntaxDatabase {} -} - ", - ); - - let mut symbols = analysis.symbol_search(Query::new("Hir".into())).unwrap(); - let s = symbols.pop().unwrap(); - assert_eq!(s.name(), "HirDatabase"); - assert_eq!(s.range(), TextRange::from_to(33.into(), 44.into())); -} diff --git a/crates/ra_analysis/tests/test/runnables.rs b/crates/ra_analysis/tests/test/runnables.rs deleted file mode 100644 index e6e0afbc3..000000000 --- a/crates/ra_analysis/tests/test/runnables.rs +++ /dev/null @@ -1,109 +0,0 @@ -use test_utils::assert_eq_dbg; - -use ra_analysis::mock_analysis::analysis_and_position; - -#[test] -fn test_runnables() { - let (analysis, pos) = analysis_and_position( - r#" - //- /lib.rs - <|> //empty - fn main() {} - - #[test] - fn test_foo() {} - - #[test] - #[ignore] - fn test_foo() {} - "#, - ); - let runnables = analysis.runnables(pos.file_id).unwrap(); - assert_eq_dbg( - r#"[Runnable { range: [1; 21), kind: Bin }, - Runnable { range: [22; 46), kind: Test { name: "test_foo" } }, - Runnable { range: [47; 81), kind: Test { name: "test_foo" } }]"#, - &runnables, - ) -} - -#[test] -fn test_runnables_module() { - let (analysis, pos) = analysis_and_position( - r#" - //- /lib.rs - <|> //empty - mod test_mod { - #[test] - fn test_foo1() {} - } - "#, - ); - let runnables = analysis.runnables(pos.file_id).unwrap(); - assert_eq_dbg( - r#"[Runnable { range: [1; 59), kind: TestMod { path: "test_mod" } }, - Runnable { range: [28; 57), kind: Test { name: "test_foo1" } }]"#, - &runnables, - ) -} - -#[test] -fn test_runnables_one_depth_layer_module() { - let (analysis, pos) = analysis_and_position( - r#" - //- /lib.rs - <|> //empty - mod foo { - mod test_mod { - #[test] - fn test_foo1() {} - } - } - "#, - ); - let runnables = analysis.runnables(pos.file_id).unwrap(); - assert_eq_dbg( - r#"[Runnable { range: [23; 85), kind: TestMod { path: "foo::test_mod" } }, - Runnable { range: [46; 79), kind: Test { name: "test_foo1" } }]"#, - &runnables, - ) -} - -#[test] -fn 
test_runnables_multiple_depth_module() { - let (analysis, pos) = analysis_and_position( - r#" - //- /lib.rs - <|> //empty - mod foo { - mod bar { - mod test_mod { - #[test] - fn test_foo1() {} - } - } - } - "#, - ); - let runnables = analysis.runnables(pos.file_id).unwrap(); - assert_eq_dbg( - r#"[Runnable { range: [41; 115), kind: TestMod { path: "foo::bar::test_mod" } }, - Runnable { range: [68; 105), kind: Test { name: "test_foo1" } }]"#, - &runnables, - ) -} - -#[test] -fn test_runnables_no_test_function_in_module() { - let (analysis, pos) = analysis_and_position( - r#" - //- /lib.rs - <|> //empty - mod test_mod { - fn foo1() {} - } - "#, - ); - let runnables = analysis.runnables(pos.file_id).unwrap(); - assert_eq_dbg(r#"[]"#, &runnables) -} -- cgit v1.2.3 From 5b573deb20b15451788dd2861e9fc6e69ed0472e Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 8 Jan 2019 22:33:36 +0300 Subject: fix usages after rename --- crates/ra_ide_api/Cargo.toml | 23 + crates/ra_ide_api/src/call_info.rs | 451 ++++++++++++++++++ crates/ra_ide_api/src/completion.rs | 77 ++++ crates/ra_ide_api/src/completion/complete_dot.rs | 121 +++++ .../ra_ide_api/src/completion/complete_fn_param.rs | 102 +++++ .../ra_ide_api/src/completion/complete_keyword.rs | 339 ++++++++++++++ crates/ra_ide_api/src/completion/complete_path.rs | 128 ++++++ crates/ra_ide_api/src/completion/complete_scope.rs | 192 ++++++++ .../ra_ide_api/src/completion/complete_snippet.rs | 73 +++ .../src/completion/completion_context.rs | 205 +++++++++ .../ra_ide_api/src/completion/completion_item.rs | 244 ++++++++++ crates/ra_ide_api/src/db.rs | 128 ++++++ crates/ra_ide_api/src/extend_selection.rs | 56 +++ crates/ra_ide_api/src/goto_defenition.rs | 139 ++++++ crates/ra_ide_api/src/hover.rs | 257 +++++++++++ crates/ra_ide_api/src/imp.rs | 309 +++++++++++++ crates/ra_ide_api/src/lib.rs | 509 +++++++++++++++++++++ crates/ra_ide_api/src/mock_analysis.rs | 135 ++++++ crates/ra_ide_api/src/runnables.rs | 89 ++++ 
crates/ra_ide_api/src/symbol_index.rs | 222 +++++++++ crates/ra_ide_api/src/syntax_highlighting.rs | 92 ++++ crates/ra_ide_api/tests/test/main.rs | 249 ++++++++++ crates/ra_ide_api/tests/test/runnables.rs | 109 +++++ crates/ra_lsp_server/Cargo.toml | 2 +- crates/ra_lsp_server/src/conv.rs | 2 +- crates/ra_lsp_server/src/main_loop.rs | 2 +- crates/ra_lsp_server/src/main_loop/handlers.rs | 4 +- .../ra_lsp_server/src/main_loop/subscriptions.rs | 2 +- crates/ra_lsp_server/src/server_world.rs | 6 +- 29 files changed, 4258 insertions(+), 9 deletions(-) create mode 100644 crates/ra_ide_api/Cargo.toml create mode 100644 crates/ra_ide_api/src/call_info.rs create mode 100644 crates/ra_ide_api/src/completion.rs create mode 100644 crates/ra_ide_api/src/completion/complete_dot.rs create mode 100644 crates/ra_ide_api/src/completion/complete_fn_param.rs create mode 100644 crates/ra_ide_api/src/completion/complete_keyword.rs create mode 100644 crates/ra_ide_api/src/completion/complete_path.rs create mode 100644 crates/ra_ide_api/src/completion/complete_scope.rs create mode 100644 crates/ra_ide_api/src/completion/complete_snippet.rs create mode 100644 crates/ra_ide_api/src/completion/completion_context.rs create mode 100644 crates/ra_ide_api/src/completion/completion_item.rs create mode 100644 crates/ra_ide_api/src/db.rs create mode 100644 crates/ra_ide_api/src/extend_selection.rs create mode 100644 crates/ra_ide_api/src/goto_defenition.rs create mode 100644 crates/ra_ide_api/src/hover.rs create mode 100644 crates/ra_ide_api/src/imp.rs create mode 100644 crates/ra_ide_api/src/lib.rs create mode 100644 crates/ra_ide_api/src/mock_analysis.rs create mode 100644 crates/ra_ide_api/src/runnables.rs create mode 100644 crates/ra_ide_api/src/symbol_index.rs create mode 100644 crates/ra_ide_api/src/syntax_highlighting.rs create mode 100644 crates/ra_ide_api/tests/test/main.rs create mode 100644 crates/ra_ide_api/tests/test/runnables.rs (limited to 'crates') diff --git 
a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml new file mode 100644 index 000000000..d42a664b6 --- /dev/null +++ b/crates/ra_ide_api/Cargo.toml @@ -0,0 +1,23 @@ +[package] +edition = "2018" +name = "ra_ide_api" +version = "0.1.0" +authors = ["Aleksey Kladov "] + +[dependencies] +itertools = "0.8.0" +log = "0.4.5" +relative-path = "0.4.0" +rayon = "1.0.2" +fst = "0.3.1" +salsa = "0.9.1" +rustc-hash = "1.0" +parking_lot = "0.7.0" +unicase = "2.2.0" + +ra_syntax = { path = "../ra_syntax" } +ra_ide_api_light = { path = "../ra_ide_api_light" } +ra_text_edit = { path = "../ra_text_edit" } +ra_db = { path = "../ra_db" } +hir = { path = "../ra_hir", package = "ra_hir" } +test_utils = { path = "../test_utils" } diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs new file mode 100644 index 000000000..27b760780 --- /dev/null +++ b/crates/ra_ide_api/src/call_info.rs @@ -0,0 +1,451 @@ +use std::cmp::{max, min}; + +use ra_db::{SyntaxDatabase, Cancelable}; +use ra_syntax::{ + AstNode, SyntaxNode, TextUnit, TextRange, + SyntaxKind::FN_DEF, + ast::{self, ArgListOwner, DocCommentsOwner}, + algo::find_node_at_offset, +}; + +use crate::{FilePosition, CallInfo, db::RootDatabase}; + +/// Computes parameter information for the given call expression. +pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Cancelable> { + let file = db.source_file(position.file_id); + let syntax = file.syntax(); + + // Find the calling expression and it's NameRef + let calling_node = ctry!(FnCallNode::with_node(syntax, position.offset)); + let name_ref = ctry!(calling_node.name_ref()); + + // Resolve the function's NameRef (NOTE: this isn't entirely accurate). 
+ let file_symbols = db.index_resolve(name_ref)?; + let symbol = ctry!(file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF)); + let fn_file = db.source_file(symbol.file_id); + let fn_def = symbol.ptr.resolve(&fn_file); + let fn_def = ast::FnDef::cast(&fn_def).unwrap(); + let mut call_info = ctry!(CallInfo::new(fn_def)); + // If we have a calling expression let's find which argument we are on + let num_params = call_info.parameters.len(); + let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some(); + + if num_params == 1 { + if !has_self { + call_info.active_parameter = Some(0); + } + } else if num_params > 1 { + // Count how many parameters into the call we are. + // TODO: This is best effort for now and should be fixed at some point. + // It may be better to see where we are in the arg_list and then check + // where offset is in that list (or beyond). + // Revisit this after we get documentation comments in. + if let Some(ref arg_list) = calling_node.arg_list() { + let start = arg_list.syntax().range().start(); + + let range_search = TextRange::from_to(start, position.offset); + let mut commas: usize = arg_list + .syntax() + .text() + .slice(range_search) + .to_string() + .matches(',') + .count(); + + // If we have a method call eat the first param since it's just self. 
+ if has_self { + commas += 1; + } + + call_info.active_parameter = Some(commas); + } + } + + Ok(Some(call_info)) +} + +enum FnCallNode<'a> { + CallExpr(&'a ast::CallExpr), + MethodCallExpr(&'a ast::MethodCallExpr), +} + +impl<'a> FnCallNode<'a> { + pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option> { + if let Some(expr) = find_node_at_offset::(syntax, offset) { + return Some(FnCallNode::CallExpr(expr)); + } + if let Some(expr) = find_node_at_offset::(syntax, offset) { + return Some(FnCallNode::MethodCallExpr(expr)); + } + None + } + + pub fn name_ref(&self) -> Option<&'a ast::NameRef> { + match *self { + FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { + ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, + _ => return None, + }), + + FnCallNode::MethodCallExpr(call_expr) => call_expr + .syntax() + .children() + .filter_map(ast::NameRef::cast) + .nth(0), + } + } + + pub fn arg_list(&self) -> Option<&'a ast::ArgList> { + match *self { + FnCallNode::CallExpr(expr) => expr.arg_list(), + FnCallNode::MethodCallExpr(expr) => expr.arg_list(), + } + } +} + +impl CallInfo { + fn new(node: &ast::FnDef) -> Option { + let mut doc = None; + + // Strip the body out for the label. 
+ let mut label: String = if let Some(body) = node.body() { + let body_range = body.syntax().range(); + let label: String = node + .syntax() + .children() + .filter(|child| !child.range().is_subrange(&body_range)) + .map(|node| node.text().to_string()) + .collect(); + label + } else { + node.syntax().text().to_string() + }; + + if let Some((comment_range, docs)) = extract_doc_comments(node) { + let comment_range = comment_range + .checked_sub(node.syntax().range().start()) + .unwrap(); + let start = comment_range.start().to_usize(); + let end = comment_range.end().to_usize(); + + // Remove the comment from the label + label.replace_range(start..end, ""); + + // Massage markdown + let mut processed_lines = Vec::new(); + let mut in_code_block = false; + for line in docs.lines() { + if line.starts_with("```") { + in_code_block = !in_code_block; + } + + let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { + "```rust".into() + } else { + line.to_string() + }; + + processed_lines.push(line); + } + + if !processed_lines.is_empty() { + doc = Some(processed_lines.join("\n")); + } + } + + Some(CallInfo { + parameters: param_list(node), + label: label.trim().to_owned(), + doc, + active_parameter: None, + }) + } +} + +fn extract_doc_comments(node: &ast::FnDef) -> Option<(TextRange, String)> { + if node.doc_comments().count() == 0 { + return None; + } + + let comment_text = node.doc_comment_text(); + + let (begin, end) = node + .doc_comments() + .map(|comment| comment.syntax().range()) + .map(|range| (range.start().to_usize(), range.end().to_usize())) + .fold((std::usize::MAX, std::usize::MIN), |acc, range| { + (min(acc.0, range.0), max(acc.1, range.1)) + }); + + let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); + + Some((range, comment_text)) +} + +fn param_list(node: &ast::FnDef) -> Vec { + let mut res = vec![]; + if let Some(param_list) = node.param_list() { + if let Some(self_param) = 
param_list.self_param() { + res.push(self_param.syntax().text().to_string()) + } + + // Maybe use param.pat here? See if we can just extract the name? + //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); + res.extend( + param_list + .params() + .filter_map(|p| p.pat()) + .map(|pat| pat.syntax().text().to_string()), + ); + } + res +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::mock_analysis::single_file_with_position; + + fn call_info(text: &str) -> CallInfo { + let (analysis, position) = single_file_with_position(text); + analysis.call_info(position).unwrap().unwrap() + } + + #[test] + fn test_fn_signature_two_args_first() { + let info = call_info( + r#"fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(<|>3, ); }"#, + ); + + assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); + assert_eq!(info.active_parameter, Some(0)); + } + + #[test] + fn test_fn_signature_two_args_second() { + let info = call_info( + r#"fn foo(x: u32, y: u32) -> u32 {x + y} +fn bar() { foo(3, <|>); }"#, + ); + + assert_eq!(info.parameters, vec!("x".to_string(), "y".to_string())); + assert_eq!(info.active_parameter, Some(1)); + } + + #[test] + fn test_fn_signature_for_impl() { + let info = call_info( + r#"struct F; impl F { pub fn new() { F{}} } +fn bar() {let _ : F = F::new(<|>);}"#, + ); + + assert_eq!(info.parameters, Vec::::new()); + assert_eq!(info.active_parameter, None); + } + + #[test] + fn test_fn_signature_for_method_self() { + let info = call_info( + r#"struct F; +impl F { + pub fn new() -> F{ + F{} + } + + pub fn do_it(&self) {} +} + +fn bar() { + let f : F = F::new(); + f.do_it(<|>); +}"#, + ); + + assert_eq!(info.parameters, vec!["&self".to_string()]); + assert_eq!(info.active_parameter, None); + } + + #[test] + fn test_fn_signature_for_method_with_arg() { + let info = call_info( + r#"struct F; +impl F { + pub fn new() -> F{ + F{} + } + + pub fn do_it(&self, x: i32) {} +} + +fn bar() { + let f : F = F::new(); + 
f.do_it(<|>); +}"#, + ); + + assert_eq!(info.parameters, vec!["&self".to_string(), "x".to_string()]); + assert_eq!(info.active_parameter, Some(1)); + } + + #[test] + fn test_fn_signature_with_docs_simple() { + let info = call_info( + r#" +/// test +// non-doc-comment +fn foo(j: u32) -> u32 { + j +} + +fn bar() { + let _ = foo(<|>); +} +"#, + ); + + assert_eq!(info.parameters, vec!["j".to_string()]); + assert_eq!(info.active_parameter, Some(0)); + assert_eq!(info.label, "fn foo(j: u32) -> u32".to_string()); + assert_eq!(info.doc, Some("test".into())); + } + + #[test] + fn test_fn_signature_with_docs() { + let info = call_info( + r#" +/// Adds one to the number given. +/// +/// # Examples +/// +/// ``` +/// let five = 5; +/// +/// assert_eq!(6, my_crate::add_one(5)); +/// ``` +pub fn add_one(x: i32) -> i32 { + x + 1 +} + +pub fn do() { + add_one(<|> +}"#, + ); + + assert_eq!(info.parameters, vec!["x".to_string()]); + assert_eq!(info.active_parameter, Some(0)); + assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); + assert_eq!( + info.doc, + Some( + r#"Adds one to the number given. + +# Examples + +```rust +let five = 5; + +assert_eq!(6, my_crate::add_one(5)); +```"# + .into() + ) + ); + } + + #[test] + fn test_fn_signature_with_docs_impl() { + let info = call_info( + r#" +struct addr; +impl addr { + /// Adds one to the number given. + /// + /// # Examples + /// + /// ``` + /// let five = 5; + /// + /// assert_eq!(6, my_crate::add_one(5)); + /// ``` + pub fn add_one(x: i32) -> i32 { + x + 1 + } +} + +pub fn do_it() { + addr {}; + addr::add_one(<|>); +}"#, + ); + + assert_eq!(info.parameters, vec!["x".to_string()]); + assert_eq!(info.active_parameter, Some(0)); + assert_eq!(info.label, "pub fn add_one(x: i32) -> i32".to_string()); + assert_eq!( + info.doc, + Some( + r#"Adds one to the number given. 
+ +# Examples + +```rust +let five = 5; + +assert_eq!(6, my_crate::add_one(5)); +```"# + .into() + ) + ); + } + + #[test] + fn test_fn_signature_with_docs_from_actix() { + let info = call_info( + r#" +pub trait WriteHandler +where + Self: Actor, + Self::Context: ActorContext, +{ + /// Method is called when writer emits error. + /// + /// If this method returns `ErrorAction::Continue` writer processing + /// continues otherwise stream processing stops. + fn error(&mut self, err: E, ctx: &mut Self::Context) -> Running { + Running::Stop + } + + /// Method is called when writer finishes. + /// + /// By default this method stops actor's `Context`. + fn finished(&mut self, ctx: &mut Self::Context) { + ctx.stop() + } +} + +pub fn foo() { + WriteHandler r; + r.finished(<|>); +} + +"#, + ); + + assert_eq!( + info.parameters, + vec!["&mut self".to_string(), "ctx".to_string()] + ); + assert_eq!(info.active_parameter, Some(1)); + assert_eq!( + info.doc, + Some( + r#"Method is called when writer finishes. + +By default this method stops actor's `Context`."# + .into() + ) + ); + } + +} diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs new file mode 100644 index 000000000..ce777a771 --- /dev/null +++ b/crates/ra_ide_api/src/completion.rs @@ -0,0 +1,77 @@ +mod completion_item; +mod completion_context; + +mod complete_dot; +mod complete_fn_param; +mod complete_keyword; +mod complete_snippet; +mod complete_path; +mod complete_scope; + +use ra_db::SyntaxDatabase; + +use crate::{ + db, + Cancelable, FilePosition, + completion::{ + completion_item::{Completions, CompletionKind}, + completion_context::CompletionContext, + }, +}; + +pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind}; + +/// Main entry point for completion. We run completion as a two-phase process. +/// +/// First, we look at the position and collect a so-called `CompletionContext. 
+/// This is a somewhat messy process, because, during completion, syntax tree is +/// incomplete and can look really weird. +/// +/// Once the context is collected, we run a series of completion routines which +/// look at the context and produce completion items. One subtelty about this +/// phase is that completion engine should not filter by the substring which is +/// already present, it should give all possible variants for the identifier at +/// the caret. In other words, for +/// +/// ```no-run +/// fn f() { +/// let foo = 92; +/// let _ = bar<|> +/// } +/// ``` +/// +/// `foo` *should* be present among the completion variants. Filtering by +/// identifier prefix/fuzzy match should be done higher in the stack, together +/// with ordering of completions (currently this is done by the client). +pub(crate) fn completions( + db: &db::RootDatabase, + position: FilePosition, +) -> Cancelable> { + let original_file = db.source_file(position.file_id); + let ctx = ctry!(CompletionContext::new(db, &original_file, position)?); + + let mut acc = Completions::default(); + + complete_fn_param::complete_fn_param(&mut acc, &ctx); + complete_keyword::complete_expr_keyword(&mut acc, &ctx); + complete_keyword::complete_use_tree_keyword(&mut acc, &ctx); + complete_snippet::complete_expr_snippet(&mut acc, &ctx); + complete_snippet::complete_item_snippet(&mut acc, &ctx); + complete_path::complete_path(&mut acc, &ctx)?; + complete_scope::complete_scope(&mut acc, &ctx)?; + complete_dot::complete_dot(&mut acc, &ctx)?; + + Ok(Some(acc)) +} + +#[cfg(test)] +fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind) { + use crate::mock_analysis::{single_file_with_position, analysis_and_position}; + let (analysis, position) = if code.contains("//-") { + analysis_and_position(code) + } else { + single_file_with_position(code) + }; + let completions = completions(&analysis.db, position).unwrap().unwrap(); + completions.assert_match(expected_completions, kind); +} 
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs new file mode 100644 index 000000000..5d4e60dc5 --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_dot.rs @@ -0,0 +1,121 @@ +use hir::{Ty, Def}; + +use crate::Cancelable; +use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind}; + +/// Complete dot accesses, i.e. fields or methods (currently only fields). +pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { + let (function, receiver) = match (&ctx.function, ctx.dot_receiver) { + (Some(function), Some(receiver)) => (function, receiver), + _ => return Ok(()), + }; + let infer_result = function.infer(ctx.db)?; + let syntax_mapping = function.body_syntax_mapping(ctx.db)?; + let expr = match syntax_mapping.node_expr(receiver) { + Some(expr) => expr, + None => return Ok(()), + }; + let receiver_ty = infer_result[expr].clone(); + if !ctx.is_method_call { + complete_fields(acc, ctx, receiver_ty)?; + } + Ok(()) +} + +fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> { + for receiver in receiver.autoderef(ctx.db) { + match receiver { + Ty::Adt { def_id, .. } => { + match def_id.resolve(ctx.db)? 
{ + Def::Struct(s) => { + let variant_data = s.variant_data(ctx.db)?; + for field in variant_data.fields() { + CompletionItem::new( + CompletionKind::Reference, + field.name().to_string(), + ) + .kind(CompletionItemKind::Field) + .add_to(acc); + } + } + // TODO unions + _ => {} + } + } + Ty::Tuple(fields) => { + for (i, _ty) in fields.iter().enumerate() { + CompletionItem::new(CompletionKind::Reference, i.to_string()) + .kind(CompletionItemKind::Field) + .add_to(acc); + } + } + _ => {} + }; + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use crate::completion::*; + + fn check_ref_completion(code: &str, expected_completions: &str) { + check_completion(code, expected_completions, CompletionKind::Reference); + } + + #[test] + fn test_struct_field_completion() { + check_ref_completion( + r" + struct A { the_field: u32 } + fn foo(a: A) { + a.<|> + } + ", + r#"the_field"#, + ); + } + + #[test] + fn test_struct_field_completion_self() { + check_ref_completion( + r" + struct A { the_field: u32 } + impl A { + fn foo(self) { + self.<|> + } + } + ", + r#"the_field"#, + ); + } + + #[test] + fn test_struct_field_completion_autoderef() { + check_ref_completion( + r" + struct A { the_field: u32 } + impl A { + fn foo(&self) { + self.<|> + } + } + ", + r#"the_field"#, + ); + } + + #[test] + fn test_no_struct_field_completion_for_method_call() { + check_ref_completion( + r" + struct A { the_field: u32 } + fn foo(a: A) { + a.<|>() + } + ", + r#""#, + ); + } +} diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs new file mode 100644 index 000000000..c1739e47e --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs @@ -0,0 +1,102 @@ +use ra_syntax::{ + algo::visit::{visitor_ctx, VisitorCtx}, + ast, + AstNode, +}; +use rustc_hash::FxHashMap; + +use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem}; + +/// Complete repeated parametes, both name and type. 
For example, if all +/// functions in a file have a `spam: &mut Spam` parameter, a completion with +/// `spam: &mut Spam` insert text/label and `spam` lookup string will be +/// suggested. +pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_param { + return; + } + + let mut params = FxHashMap::default(); + for node in ctx.leaf.ancestors() { + let _ = visitor_ctx(&mut params) + .visit::(process) + .visit::(process) + .accept(node); + } + params + .into_iter() + .filter_map(|(label, (count, param))| { + let lookup = param.pat()?.syntax().text().to_string(); + if count < 2 { + None + } else { + Some((label, lookup)) + } + }) + .for_each(|(label, lookup)| { + CompletionItem::new(CompletionKind::Magic, label) + .lookup_by(lookup) + .add_to(acc) + }); + + fn process<'a, N: ast::FnDefOwner>( + node: &'a N, + params: &mut FxHashMap, + ) { + node.functions() + .filter_map(|it| it.param_list()) + .flat_map(|it| it.params()) + .for_each(|param| { + let text = param.syntax().text().to_string(); + params.entry(text).or_insert((0, param)).0 += 1; + }) + } +} + +#[cfg(test)] +mod tests { + use crate::completion::*; + + fn check_magic_completion(code: &str, expected_completions: &str) { + check_completion(code, expected_completions, CompletionKind::Magic); + } + + #[test] + fn test_param_completion_last_param() { + check_magic_completion( + r" + fn foo(file_id: FileId) {} + fn bar(file_id: FileId) {} + fn baz(file<|>) {} + ", + r#"file_id "file_id: FileId""#, + ); + } + + #[test] + fn test_param_completion_nth_param() { + check_magic_completion( + r" + fn foo(file_id: FileId) {} + fn bar(file_id: FileId) {} + fn baz(file<|>, x: i32) {} + ", + r#"file_id "file_id: FileId""#, + ); + } + + #[test] + fn test_param_completion_trait_param() { + check_magic_completion( + r" + pub(crate) trait SourceRoot { + pub fn contains(&self, file_id: FileId) -> bool; + pub fn module_map(&self) -> &ModuleMap; + pub fn lines(&self, file_id: FileId) -> 
&LineIndex; + pub fn syntax(&self, file<|>) + } + ", + r#"file_id "file_id: FileId""#, + ); + } +} diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs new file mode 100644 index 000000000..d350f06ce --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_keyword.rs @@ -0,0 +1,339 @@ +use ra_syntax::{ + algo::visit::{visitor, Visitor}, + AstNode, + ast::{self, LoopBodyOwner}, + SyntaxKind::*, SyntaxNode, +}; + +use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; + +pub(super) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { + // complete keyword "crate" in use stmt + match (ctx.use_item_syntax.as_ref(), ctx.path_prefix.as_ref()) { + (Some(_), None) => { + CompletionItem::new(CompletionKind::Keyword, "crate") + .kind(CompletionItemKind::Keyword) + .lookup_by("crate") + .snippet("crate::") + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, "self") + .kind(CompletionItemKind::Keyword) + .lookup_by("self") + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, "super") + .kind(CompletionItemKind::Keyword) + .lookup_by("super") + .add_to(acc); + } + (Some(_), Some(_)) => { + CompletionItem::new(CompletionKind::Keyword, "self") + .kind(CompletionItemKind::Keyword) + .lookup_by("self") + .add_to(acc); + CompletionItem::new(CompletionKind::Keyword, "super") + .kind(CompletionItemKind::Keyword) + .lookup_by("super") + .add_to(acc); + } + _ => {} + } +} + +fn keyword(kw: &str, snippet: &str) -> CompletionItem { + CompletionItem::new(CompletionKind::Keyword, kw) + .kind(CompletionItemKind::Keyword) + .snippet(snippet) + .build() +} + +pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_trivial_path { + return; + } + + let fn_def = match ctx.function_syntax { + Some(it) => it, + None => return, + }; + acc.add(keyword("if", "if $0 {}")); + 
acc.add(keyword("match", "match $0 {}")); + acc.add(keyword("while", "while $0 {}")); + acc.add(keyword("loop", "loop {$0}")); + + if ctx.after_if { + acc.add(keyword("else", "else {$0}")); + acc.add(keyword("else if", "else if $0 {}")); + } + if is_in_loop_body(ctx.leaf) { + if ctx.can_be_stmt { + acc.add(keyword("continue", "continue;")); + acc.add(keyword("break", "break;")); + } else { + acc.add(keyword("continue", "continue")); + acc.add(keyword("break", "break")); + } + } + acc.add_all(complete_return(fn_def, ctx.can_be_stmt)); +} + +fn is_in_loop_body(leaf: &SyntaxNode) -> bool { + for node in leaf.ancestors() { + if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { + break; + } + let loop_body = visitor() + .visit::(LoopBodyOwner::loop_body) + .visit::(LoopBodyOwner::loop_body) + .visit::(LoopBodyOwner::loop_body) + .accept(node); + if let Some(Some(body)) = loop_body { + if leaf.range().is_subrange(&body.syntax().range()) { + return true; + } + } + } + false +} + +fn complete_return(fn_def: &ast::FnDef, can_be_stmt: bool) -> Option { + let snip = match (can_be_stmt, fn_def.ret_type().is_some()) { + (true, true) => "return $0;", + (true, false) => "return;", + (false, true) => "return $0", + (false, false) => "return", + }; + Some(keyword("return", snip)) +} + +#[cfg(test)] +mod tests { + use crate::completion::{CompletionKind, check_completion}; + fn check_keyword_completion(code: &str, expected_completions: &str) { + check_completion(code, expected_completions, CompletionKind::Keyword); + } + + #[test] + fn completes_keywords_in_use_stmt() { + check_keyword_completion( + r" + use <|> + ", + r#" + crate "crate" "crate::" + self "self" + super "super" + "#, + ); + + check_keyword_completion( + r" + use a::<|> + ", + r#" + self "self" + super "super" + "#, + ); + + check_keyword_completion( + r" + use a::{b, <|>} + ", + r#" + self "self" + super "super" + "#, + ); + } + + #[test] + fn completes_various_keywords_in_function() { + check_keyword_completion( 
+ r" + fn quux() { + <|> + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return;" + "#, + ); + } + + #[test] + fn completes_else_after_if() { + check_keyword_completion( + r" + fn quux() { + if true { + () + } <|> + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + else "else {$0}" + else if "else if $0 {}" + return "return;" + "#, + ); + } + + #[test] + fn test_completion_return_value() { + check_keyword_completion( + r" + fn quux() -> i32 { + <|> + 92 + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return $0;" + "#, + ); + check_keyword_completion( + r" + fn quux() { + <|> + 92 + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return;" + "#, + ); + } + + #[test] + fn dont_add_semi_after_return_if_not_a_statement() { + check_keyword_completion( + r" + fn quux() -> i32 { + match () { + () => <|> + } + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return $0" + "#, + ); + } + + #[test] + fn last_return_in_block_has_semi() { + check_keyword_completion( + r" + fn quux() -> i32 { + if condition { + <|> + } + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return $0;" + "#, + ); + check_keyword_completion( + r" + fn quux() -> i32 { + if condition { + <|> + } + let x = 92; + x + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return $0;" + "#, + ); + } + + #[test] + fn completes_break_and_continue_in_loops() { + check_keyword_completion( + r" + fn quux() -> i32 { + loop { <|> } + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + continue "continue;" + break "break;" + return "return $0;" + "#, + ); + // No completion: lambda isolates control flow + 
check_keyword_completion( + r" + fn quux() -> i32 { + loop { || { <|> } } + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + return "return $0;" + "#, + ); + } + + #[test] + fn no_semi_after_break_continue_in_expr() { + check_keyword_completion( + r" + fn f() { + loop { + match () { + () => br<|> + } + } + } + ", + r#" + if "if $0 {}" + match "match $0 {}" + while "while $0 {}" + loop "loop {$0}" + continue "continue" + break "break" + return "return" + "#, + ) + } +} diff --git a/crates/ra_ide_api/src/completion/complete_path.rs b/crates/ra_ide_api/src/completion/complete_path.rs new file mode 100644 index 000000000..4723a65a6 --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_path.rs @@ -0,0 +1,128 @@ +use crate::{ + Cancelable, + completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext}, +}; + +pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { + let (path, module) = match (&ctx.path_prefix, &ctx.module) { + (Some(path), Some(module)) => (path.clone(), module), + _ => return Ok(()), + }; + let def_id = match module.resolve_path(ctx.db, &path)?.take_types() { + Some(it) => it, + None => return Ok(()), + }; + match def_id.resolve(ctx.db)? { + hir::Def::Module(module) => { + let module_scope = module.scope(ctx.db)?; + module_scope.entries().for_each(|(name, res)| { + CompletionItem::new(CompletionKind::Reference, name.to_string()) + .from_resolution(ctx, res) + .add_to(acc) + }); + } + hir::Def::Enum(e) => e + .variants(ctx.db)? 
+ .into_iter() + .for_each(|(name, _variant)| { + CompletionItem::new(CompletionKind::Reference, name.to_string()) + .kind(CompletionItemKind::EnumVariant) + .add_to(acc) + }), + _ => return Ok(()), + }; + Ok(()) +} + +#[cfg(test)] +mod tests { + use crate::completion::{CompletionKind, check_completion}; + + fn check_reference_completion(code: &str, expected_completions: &str) { + check_completion(code, expected_completions, CompletionKind::Reference); + } + + #[test] + fn completes_use_item_starting_with_self() { + check_reference_completion( + r" + use self::m::<|>; + + mod m { + struct Bar; + } + ", + "Bar", + ); + } + + #[test] + fn completes_use_item_starting_with_crate() { + check_reference_completion( + " + //- /lib.rs + mod foo; + struct Spam; + //- /foo.rs + use crate::Sp<|> + ", + "Spam;foo", + ); + } + + #[test] + fn completes_nested_use_tree() { + check_reference_completion( + " + //- /lib.rs + mod foo; + struct Spam; + //- /foo.rs + use crate::{Sp<|>}; + ", + "Spam;foo", + ); + } + + #[test] + fn completes_deeply_nested_use_tree() { + check_reference_completion( + " + //- /lib.rs + mod foo; + pub mod bar { + pub mod baz { + pub struct Spam; + } + } + //- /foo.rs + use crate::{bar::{baz::Sp<|>}}; + ", + "Spam", + ); + } + + #[test] + fn completes_enum_variant() { + check_reference_completion( + " + //- /lib.rs + enum E { Foo, Bar(i32) } + fn foo() { let _ = E::<|> } + ", + "Foo;Bar", + ); + } + + #[test] + fn dont_render_function_parens_in_use_item() { + check_reference_completion( + " + //- /lib.rs + mod m { pub fn foo() {} } + use crate::m::f<|>; + ", + "foo", + ) + } +} diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs new file mode 100644 index 000000000..ee9052d3d --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_scope.rs @@ -0,0 +1,192 @@ +use rustc_hash::FxHashSet; +use ra_syntax::TextUnit; + +use crate::{ + Cancelable, + completion::{CompletionItem, 
CompletionItemKind, Completions, CompletionKind, CompletionContext}, +}; + +pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> { + if !ctx.is_trivial_path { + return Ok(()); + } + let module = match &ctx.module { + Some(it) => it, + None => return Ok(()), + }; + if let Some(function) = &ctx.function { + let scopes = function.scopes(ctx.db)?; + complete_fn(acc, &scopes, ctx.offset); + } + + let module_scope = module.scope(ctx.db)?; + let (file_id, _) = module.defenition_source(ctx.db)?; + module_scope + .entries() + .filter(|(_name, res)| { + // Don't expose this item + // FIXME: this penetrates through all kinds of abstractions, + // we need to figura out the way to do it less ugly. + match res.import { + None => true, + Some(import) => { + let range = import.range(ctx.db, file_id); + !range.is_subrange(&ctx.leaf.range()) + } + } + }) + .for_each(|(name, res)| { + CompletionItem::new(CompletionKind::Reference, name.to_string()) + .from_resolution(ctx, res) + .add_to(acc) + }); + Ok(()) +} + +fn complete_fn(acc: &mut Completions, scopes: &hir::ScopesWithSyntaxMapping, offset: TextUnit) { + let mut shadowed = FxHashSet::default(); + scopes + .scope_chain_for_offset(offset) + .flat_map(|scope| scopes.scopes.entries(scope).iter()) + .filter(|entry| shadowed.insert(entry.name())) + .for_each(|entry| { + CompletionItem::new(CompletionKind::Reference, entry.name().to_string()) + .kind(CompletionItemKind::Binding) + .add_to(acc) + }); +} + +#[cfg(test)] +mod tests { + use crate::completion::{CompletionKind, check_completion}; + + fn check_reference_completion(code: &str, expected_completions: &str) { + check_completion(code, expected_completions, CompletionKind::Reference); + } + + #[test] + fn completes_bindings_from_let() { + check_reference_completion( + r" + fn quux(x: i32) { + let y = 92; + 1 + <|>; + let z = (); + } + ", + r#"y;x;quux "quux($0)""#, + ); + } + + #[test] + fn completes_bindings_from_if_let() { + 
check_reference_completion( + r" + fn quux() { + if let Some(x) = foo() { + let y = 92; + }; + if let Some(a) = bar() { + let b = 62; + 1 + <|> + } + } + ", + r#"b;a;quux "quux()$0""#, + ); + } + + #[test] + fn completes_bindings_from_for() { + check_reference_completion( + r" + fn quux() { + for x in &[1, 2, 3] { + <|> + } + } + ", + r#"x;quux "quux()$0""#, + ); + } + + #[test] + fn completes_module_items() { + check_reference_completion( + r" + struct Foo; + enum Baz {} + fn quux() { + <|> + } + ", + r#"quux "quux()$0";Foo;Baz"#, + ); + } + + #[test] + fn completes_module_items_in_nested_modules() { + check_reference_completion( + r" + struct Foo; + mod m { + struct Bar; + fn quux() { <|> } + } + ", + r#"quux "quux()$0";Bar"#, + ); + } + + #[test] + fn completes_return_type() { + check_reference_completion( + r" + struct Foo; + fn x() -> <|> + ", + r#"Foo;x "x()$0""#, + ) + } + + #[test] + fn dont_show_both_completions_for_shadowing() { + check_reference_completion( + r" + fn foo() -> { + let bar = 92; + { + let bar = 62; + <|> + } + } + ", + r#"bar;foo "foo()$0""#, + ) + } + + #[test] + fn completes_self_in_methods() { + check_reference_completion(r"impl S { fn foo(&self) { <|> } }", "self") + } + + #[test] + fn inserts_parens_for_function_calls() { + check_reference_completion( + r" + fn no_args() {} + fn main() { no_<|> } + ", + r#"no_args "no_args()$0" + main "main()$0""#, + ); + check_reference_completion( + r" + fn with_args(x: i32, y: String) {} + fn main() { with_<|> } + ", + r#"main "main()$0" + with_args "with_args($0)""#, + ); + } +} diff --git a/crates/ra_ide_api/src/completion/complete_snippet.rs b/crates/ra_ide_api/src/completion/complete_snippet.rs new file mode 100644 index 000000000..a495751dd --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_snippet.rs @@ -0,0 +1,73 @@ +use crate::completion::{CompletionItem, Completions, CompletionKind, CompletionItemKind, CompletionContext, completion_item::Builder}; + +fn snippet(label: &str, 
snippet: &str) -> Builder { + CompletionItem::new(CompletionKind::Snippet, label) + .snippet(snippet) + .kind(CompletionItemKind::Snippet) +} + +pub(super) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { + return; + } + snippet("pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); + snippet("ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); +} + +pub(super) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { + if !ctx.is_new_item { + return; + } + snippet( + "Test function", + "\ +#[test] +fn ${1:feature}() { + $0 +}", + ) + .lookup_by("tfn") + .add_to(acc); + + snippet("pub(crate)", "pub(crate) $0").add_to(acc); +} + +#[cfg(test)] +mod tests { + use crate::completion::{CompletionKind, check_completion}; + fn check_snippet_completion(code: &str, expected_completions: &str) { + check_completion(code, expected_completions, CompletionKind::Snippet); + } + + #[test] + fn completes_snippets_in_expressions() { + check_snippet_completion( + r"fn foo(x: i32) { <|> }", + r##" + pd "eprintln!(\"$0 = {:?}\", $0);" + ppd "eprintln!(\"$0 = {:#?}\", $0);" + "##, + ); + } + + #[test] + fn completes_snippets_in_items() { + // check_snippet_completion(r" + // <|> + // ", + // r##"[CompletionItem { label: "Test function", lookup: None, snippet: Some("#[test]\nfn test_${1:feature}() {\n$0\n}"##, + // ); + check_snippet_completion( + r" + #[cfg(test)] + mod tests { + <|> + } + ", + r##" + tfn "Test function" "#[test]\nfn ${1:feature}() {\n $0\n}" + pub(crate) "pub(crate) $0" + "##, + ); + } +} diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs new file mode 100644 index 000000000..01786bb69 --- /dev/null +++ b/crates/ra_ide_api/src/completion/completion_context.rs @@ -0,0 +1,205 @@ +use ra_text_edit::AtomTextEdit; +use ra_syntax::{ + AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, + ast, + 
algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset}, + SyntaxKind::*, +}; +use hir::source_binder; + +use crate::{db, FilePosition, Cancelable}; + +/// `CompletionContext` is created early during completion to figure out, where +/// exactly is the cursor, syntax-wise. +#[derive(Debug)] +pub(super) struct CompletionContext<'a> { + pub(super) db: &'a db::RootDatabase, + pub(super) offset: TextUnit, + pub(super) leaf: &'a SyntaxNode, + pub(super) module: Option, + pub(super) function: Option, + pub(super) function_syntax: Option<&'a ast::FnDef>, + pub(super) use_item_syntax: Option<&'a ast::UseItem>, + pub(super) is_param: bool, + /// A single-indent path, like `foo`. + pub(super) is_trivial_path: bool, + /// If not a trivial, path, the prefix (qualifier). + pub(super) path_prefix: Option, + pub(super) after_if: bool, + /// `true` if we are a statement or a last expr in the block. + pub(super) can_be_stmt: bool, + /// Something is typed at the "top" level, in module or impl/trait. + pub(super) is_new_item: bool, + /// The receiver if this is a field or method access, i.e. writing something.<|> + pub(super) dot_receiver: Option<&'a ast::Expr>, + /// If this is a method call in particular, i.e. the () are already there. 
+ pub(super) is_method_call: bool, +} + +impl<'a> CompletionContext<'a> { + pub(super) fn new( + db: &'a db::RootDatabase, + original_file: &'a SourceFile, + position: FilePosition, + ) -> Cancelable>> { + let module = source_binder::module_from_position(db, position)?; + let leaf = + ctry!(find_leaf_at_offset(original_file.syntax(), position.offset).left_biased()); + let mut ctx = CompletionContext { + db, + leaf, + offset: position.offset, + module, + function: None, + function_syntax: None, + use_item_syntax: None, + is_param: false, + is_trivial_path: false, + path_prefix: None, + after_if: false, + can_be_stmt: false, + is_new_item: false, + dot_receiver: None, + is_method_call: false, + }; + ctx.fill(original_file, position.offset); + Ok(Some(ctx)) + } + + fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) { + // Insert a fake ident to get a valid parse tree. We will use this file + // to determine context, though the original_file will be used for + // actual completion. + let file = { + let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); + original_file.reparse(&edit) + }; + + // First, let's try to complete a reference to some declaration. + if let Some(name_ref) = find_node_at_offset::(file.syntax(), offset) { + // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. + // See RFC#1685. + if is_node::(name_ref.syntax()) { + self.is_param = true; + return; + } + self.classify_name_ref(original_file, name_ref); + } + + // Otherwise, see if this is a declaration. We can use heuristics to + // suggest declaration names, see `CompletionKind::Magic`. 
+ if let Some(name) = find_node_at_offset::(file.syntax(), offset) { + if is_node::(name.syntax()) { + self.is_param = true; + return; + } + } + } + fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { + let name_range = name_ref.syntax().range(); + let top_node = name_ref + .syntax() + .ancestors() + .take_while(|it| it.range() == name_range) + .last() + .unwrap(); + + match top_node.parent().map(|it| it.kind()) { + Some(SOURCE_FILE) | Some(ITEM_LIST) => { + self.is_new_item = true; + return; + } + _ => (), + } + + self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast); + + self.function_syntax = self + .leaf + .ancestors() + .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) + .find_map(ast::FnDef::cast); + match (&self.module, self.function_syntax) { + (Some(module), Some(fn_def)) => { + let function = source_binder::function_from_module(self.db, module, fn_def); + self.function = Some(function); + } + _ => (), + } + + let parent = match name_ref.syntax().parent() { + Some(it) => it, + None => return, + }; + if let Some(segment) = ast::PathSegment::cast(parent) { + let path = segment.parent_path(); + if let Some(mut path) = hir::Path::from_ast(path) { + if !path.is_ident() { + path.segments.pop().unwrap(); + self.path_prefix = Some(path); + return; + } + } + if path.qualifier().is_none() { + self.is_trivial_path = true; + + // Find either enclosing expr statement (thing with `;`) or a + // block. If block, check that we are the last expr. 
+ self.can_be_stmt = name_ref + .syntax() + .ancestors() + .find_map(|node| { + if let Some(stmt) = ast::ExprStmt::cast(node) { + return Some(stmt.syntax().range() == name_ref.syntax().range()); + } + if let Some(block) = ast::Block::cast(node) { + return Some( + block.expr().map(|e| e.syntax().range()) + == Some(name_ref.syntax().range()), + ); + } + None + }) + .unwrap_or(false); + + if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) { + if let Some(if_expr) = + find_node_at_offset::(original_file.syntax(), off) + { + if if_expr.syntax().range().end() < name_ref.syntax().range().start() { + self.after_if = true; + } + } + } + } + } + if let Some(field_expr) = ast::FieldExpr::cast(parent) { + // The receiver comes before the point of insertion of the fake + // ident, so it should have the same range in the non-modified file + self.dot_receiver = field_expr + .expr() + .map(|e| e.syntax().range()) + .and_then(|r| find_node_with_range(original_file.syntax(), r)); + } + if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { + // As above + self.dot_receiver = method_call_expr + .expr() + .map(|e| e.syntax().range()) + .and_then(|r| find_node_with_range(original_file.syntax(), r)); + self.is_method_call = true; + } + } +} + +fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { + let node = find_covering_node(syntax, range); + node.ancestors().find_map(N::cast) +} + +fn is_node(node: &SyntaxNode) -> bool { + match node.ancestors().filter_map(N::cast).next() { + None => false, + Some(n) => n.syntax().range() == node.range(), + } +} diff --git a/crates/ra_ide_api/src/completion/completion_item.rs b/crates/ra_ide_api/src/completion/completion_item.rs new file mode 100644 index 000000000..a25b87bee --- /dev/null +++ b/crates/ra_ide_api/src/completion/completion_item.rs @@ -0,0 +1,244 @@ +use hir::PerNs; + +use crate::completion::CompletionContext; + +/// `CompletionItem` describes a single completion variant in 
the editor pop-up. +/// It is basically a POD with various properties. To construct a +/// `CompletionItem`, use `new` method and the `Builder` struct. +#[derive(Debug)] +pub struct CompletionItem { + /// Used only internally in tests, to check only specific kind of + /// completion. + completion_kind: CompletionKind, + label: String, + lookup: Option, + snippet: Option, + kind: Option, +} + +pub enum InsertText { + PlainText { text: String }, + Snippet { text: String }, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CompletionItemKind { + Snippet, + Keyword, + Module, + Function, + Struct, + Enum, + EnumVariant, + Binding, + Field, +} + +#[derive(Debug, PartialEq, Eq)] +pub(crate) enum CompletionKind { + /// Parser-based keyword completion. + Keyword, + /// Your usual "complete all valid identifiers". + Reference, + /// "Secret sauce" completions. + Magic, + Snippet, +} + +impl CompletionItem { + pub(crate) fn new(completion_kind: CompletionKind, label: impl Into) -> Builder { + let label = label.into(); + Builder { + completion_kind, + label, + lookup: None, + snippet: None, + kind: None, + } + } + /// What user sees in pop-up in the UI. + pub fn label(&self) -> &str { + &self.label + } + /// What string is used for filtering. + pub fn lookup(&self) -> &str { + self.lookup + .as_ref() + .map(|it| it.as_str()) + .unwrap_or(self.label()) + } + /// What is inserted. + pub fn insert_text(&self) -> InsertText { + match &self.snippet { + None => InsertText::PlainText { + text: self.label.clone(), + }, + Some(it) => InsertText::Snippet { text: it.clone() }, + } + } + + pub fn kind(&self) -> Option { + self.kind + } +} + +/// A helper to make `CompletionItem`s. 
+#[must_use] +pub(crate) struct Builder { + completion_kind: CompletionKind, + label: String, + lookup: Option, + snippet: Option, + kind: Option, +} + +impl Builder { + pub(crate) fn add_to(self, acc: &mut Completions) { + acc.add(self.build()) + } + + pub(crate) fn build(self) -> CompletionItem { + CompletionItem { + label: self.label, + lookup: self.lookup, + snippet: self.snippet, + kind: self.kind, + completion_kind: self.completion_kind, + } + } + pub(crate) fn lookup_by(mut self, lookup: impl Into) -> Builder { + self.lookup = Some(lookup.into()); + self + } + pub(crate) fn snippet(mut self, snippet: impl Into) -> Builder { + self.snippet = Some(snippet.into()); + self + } + pub(crate) fn kind(mut self, kind: CompletionItemKind) -> Builder { + self.kind = Some(kind); + self + } + pub(super) fn from_resolution( + mut self, + ctx: &CompletionContext, + resolution: &hir::Resolution, + ) -> Builder { + let resolved = resolution.def_id.and_then(|d| d.resolve(ctx.db).ok()); + let kind = match resolved { + PerNs { + types: Some(hir::Def::Module(..)), + .. + } => CompletionItemKind::Module, + PerNs { + types: Some(hir::Def::Struct(..)), + .. + } => CompletionItemKind::Struct, + PerNs { + types: Some(hir::Def::Enum(..)), + .. + } => CompletionItemKind::Enum, + PerNs { + values: Some(hir::Def::Function(function)), + .. + } => return self.from_function(ctx, function), + _ => return self, + }; + self.kind = Some(kind); + self + } + + fn from_function(mut self, ctx: &CompletionContext, function: hir::Function) -> Builder { + // If not an import, add parenthesis automatically. 
+ if ctx.use_item_syntax.is_none() { + if function.signature(ctx.db).args().is_empty() { + self.snippet = Some(format!("{}()$0", self.label)); + } else { + self.snippet = Some(format!("{}($0)", self.label)); + } + } + self.kind = Some(CompletionItemKind::Function); + self + } +} + +impl Into for Builder { + fn into(self) -> CompletionItem { + self.build() + } +} + +/// Represents an in-progress set of completions being built. +#[derive(Debug, Default)] +pub(crate) struct Completions { + buf: Vec, +} + +impl Completions { + pub(crate) fn add(&mut self, item: impl Into) { + self.buf.push(item.into()) + } + pub(crate) fn add_all(&mut self, items: I) + where + I: IntoIterator, + I::Item: Into, + { + items.into_iter().for_each(|item| self.add(item.into())) + } + + #[cfg(test)] + pub(crate) fn assert_match(&self, expected: &str, kind: CompletionKind) { + let expected = normalize(expected); + let actual = self.debug_render(kind); + test_utils::assert_eq_text!(expected.as_str(), actual.as_str(),); + + /// Normalize the textual representation of `Completions`: + /// replace `;` with newlines, normalize whitespace + fn normalize(expected: &str) -> String { + use ra_syntax::{tokenize, TextUnit, TextRange, SyntaxKind::SEMI}; + let mut res = String::new(); + for line in expected.trim().lines() { + let line = line.trim(); + let mut start_offset: TextUnit = 0.into(); + // Yep, we use rust tokenize in completion tests :-) + for token in tokenize(line) { + let range = TextRange::offset_len(start_offset, token.len); + start_offset += token.len; + if token.kind == SEMI { + res.push('\n'); + } else { + res.push_str(&line[range]); + } + } + + res.push('\n'); + } + res + } + } + + #[cfg(test)] + fn debug_render(&self, kind: CompletionKind) -> String { + let mut res = String::new(); + for c in self.buf.iter() { + if c.completion_kind == kind { + if let Some(lookup) = &c.lookup { + res.push_str(lookup); + res.push_str(&format!(" {:?}", c.label)); + } else { + res.push_str(&c.label); + } + 
if let Some(snippet) = &c.snippet { + res.push_str(&format!(" {:?}", snippet)); + } + res.push('\n'); + } + } + res + } +} + +impl Into> for Completions { + fn into(self) -> Vec { + self.buf + } +} diff --git a/crates/ra_ide_api/src/db.rs b/crates/ra_ide_api/src/db.rs new file mode 100644 index 000000000..9d46609ec --- /dev/null +++ b/crates/ra_ide_api/src/db.rs @@ -0,0 +1,128 @@ +use std::{fmt, sync::Arc}; + +use salsa::{self, Database}; +use ra_db::{LocationIntener, BaseDatabase, FileId}; + +use crate::{symbol_index, LineIndex}; + +#[derive(Debug)] +pub(crate) struct RootDatabase { + runtime: salsa::Runtime, + id_maps: Arc, +} + +#[derive(Default)] +struct IdMaps { + defs: LocationIntener, + macros: LocationIntener, +} + +impl fmt::Debug for IdMaps { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("IdMaps") + .field("n_defs", &self.defs.len()) + .finish() + } +} + +impl salsa::Database for RootDatabase { + fn salsa_runtime(&self) -> &salsa::Runtime { + &self.runtime + } +} + +impl Default for RootDatabase { + fn default() -> RootDatabase { + let mut db = RootDatabase { + runtime: salsa::Runtime::default(), + id_maps: Default::default(), + }; + db.query_mut(ra_db::CrateGraphQuery) + .set((), Default::default()); + db.query_mut(ra_db::LocalRootsQuery) + .set((), Default::default()); + db.query_mut(ra_db::LibraryRootsQuery) + .set((), Default::default()); + db + } +} + +impl salsa::ParallelDatabase for RootDatabase { + fn snapshot(&self) -> salsa::Snapshot { + salsa::Snapshot::new(RootDatabase { + runtime: self.runtime.snapshot(self), + id_maps: self.id_maps.clone(), + }) + } +} + +impl BaseDatabase for RootDatabase {} + +impl AsRef> for RootDatabase { + fn as_ref(&self) -> &LocationIntener { + &self.id_maps.defs + } +} + +impl AsRef> for RootDatabase { + fn as_ref(&self) -> &LocationIntener { + &self.id_maps.macros + } +} + +salsa::query_group! 
{ + pub(crate) trait LineIndexDatabase: ra_db::FilesDatabase + BaseDatabase { + fn line_index(file_id: FileId) -> Arc { + type LineIndexQuery; + } + } +} + +fn line_index(db: &impl ra_db::FilesDatabase, file_id: FileId) -> Arc { + let text = db.file_text(file_id); + Arc::new(LineIndex::new(&*text)) +} + +salsa::database_storage! { + pub(crate) struct RootDatabaseStorage for RootDatabase { + impl ra_db::FilesDatabase { + fn file_text() for ra_db::FileTextQuery; + fn file_relative_path() for ra_db::FileRelativePathQuery; + fn file_source_root() for ra_db::FileSourceRootQuery; + fn source_root() for ra_db::SourceRootQuery; + fn local_roots() for ra_db::LocalRootsQuery; + fn library_roots() for ra_db::LibraryRootsQuery; + fn crate_graph() for ra_db::CrateGraphQuery; + } + impl ra_db::SyntaxDatabase { + fn source_file() for ra_db::SourceFileQuery; + } + impl LineIndexDatabase { + fn line_index() for LineIndexQuery; + } + impl symbol_index::SymbolsDatabase { + fn file_symbols() for symbol_index::FileSymbolsQuery; + fn library_symbols() for symbol_index::LibrarySymbolsQuery; + } + impl hir::db::HirDatabase { + fn hir_source_file() for hir::db::HirSourceFileQuery; + fn expand_macro_invocation() for hir::db::ExpandMacroCallQuery; + fn module_tree() for hir::db::ModuleTreeQuery; + fn fn_scopes() for hir::db::FnScopesQuery; + fn file_items() for hir::db::SourceFileItemsQuery; + fn file_item() for hir::db::FileItemQuery; + fn input_module_items() for hir::db::InputModuleItemsQuery; + fn item_map() for hir::db::ItemMapQuery; + fn submodules() for hir::db::SubmodulesQuery; + fn infer() for hir::db::InferQuery; + fn type_for_def() for hir::db::TypeForDefQuery; + fn type_for_field() for hir::db::TypeForFieldQuery; + fn struct_data() for hir::db::StructDataQuery; + fn enum_data() for hir::db::EnumDataQuery; + fn impls_in_module() for hir::db::ImplsInModuleQuery; + fn body_hir() for hir::db::BodyHirQuery; + fn body_syntax_mapping() for hir::db::BodySyntaxMappingQuery; + fn 
fn_signature() for hir::db::FnSignatureQuery; + } + } +} diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs new file mode 100644 index 000000000..c3c809c9f --- /dev/null +++ b/crates/ra_ide_api/src/extend_selection.rs @@ -0,0 +1,56 @@ +use ra_db::SyntaxDatabase; +use ra_syntax::{ + SyntaxNode, AstNode, SourceFile, + ast, algo::find_covering_node, +}; + +use crate::{ + TextRange, FileRange, + db::RootDatabase, +}; + +pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { + let source_file = db.source_file(frange.file_id); + if let Some(range) = extend_selection_in_macro(db, &source_file, frange) { + return range; + } + ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) +} + +fn extend_selection_in_macro( + _db: &RootDatabase, + source_file: &SourceFile, + frange: FileRange, +) -> Option { + let macro_call = find_macro_call(source_file.syntax(), frange.range)?; + let (off, exp) = hir::MacroDef::ast_expand(macro_call)?; + let dst_range = exp.map_range_forward(frange.range - off)?; + let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?; + let src_range = exp.map_range_back(dst_range)? 
+ off; + Some(src_range) +} + +fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> { + find_covering_node(node, range) + .ancestors() + .find_map(ast::MacroCall::cast) +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::single_file_with_range; + use test_utils::assert_eq_dbg; + + #[test] + fn extend_selection_inside_macros() { + let (analysis, frange) = single_file_with_range( + " + fn main() { + ctry!(foo(|x| <|>x<|>)); + } + ", + ); + let r = analysis.extend_selection(frange); + assert_eq_dbg("[51; 56)", &r); + } +} diff --git a/crates/ra_ide_api/src/goto_defenition.rs b/crates/ra_ide_api/src/goto_defenition.rs new file mode 100644 index 000000000..fcd8d315e --- /dev/null +++ b/crates/ra_ide_api/src/goto_defenition.rs @@ -0,0 +1,139 @@ +use ra_db::{FileId, Cancelable, SyntaxDatabase}; +use ra_syntax::{ + TextRange, AstNode, ast, SyntaxKind::{NAME, MODULE}, + algo::find_node_at_offset, +}; + +use crate::{FilePosition, NavigationTarget, db::RootDatabase}; + +pub(crate) fn goto_defenition( + db: &RootDatabase, + position: FilePosition, +) -> Cancelable>> { + let file = db.source_file(position.file_id); + let syntax = file.syntax(); + if let Some(name_ref) = find_node_at_offset::(syntax, position.offset) { + return Ok(Some(reference_defenition(db, position.file_id, name_ref)?)); + } + if let Some(name) = find_node_at_offset::(syntax, position.offset) { + return name_defenition(db, position.file_id, name); + } + Ok(None) +} + +pub(crate) fn reference_defenition( + db: &RootDatabase, + file_id: FileId, + name_ref: &ast::NameRef, +) -> Cancelable> { + if let Some(fn_descr) = + hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())? 
+ { + let scope = fn_descr.scopes(db)?; + // First try to resolve the symbol locally + if let Some(entry) = scope.resolve_local_name(name_ref) { + let nav = NavigationTarget { + file_id, + name: entry.name().to_string().into(), + range: entry.ptr().range(), + kind: NAME, + ptr: None, + }; + return Ok(vec![nav]); + }; + } + // If that fails try the index based approach. + let navs = db + .index_resolve(name_ref)? + .into_iter() + .map(NavigationTarget::from_symbol) + .collect(); + Ok(navs) +} + +fn name_defenition( + db: &RootDatabase, + file_id: FileId, + name: &ast::Name, +) -> Cancelable>> { + if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { + if module.has_semi() { + if let Some(child_module) = + hir::source_binder::module_from_declaration(db, file_id, module)? + { + let (file_id, _) = child_module.defenition_source(db)?; + let name = match child_module.name(db)? { + Some(name) => name.to_string().into(), + None => "".into(), + }; + let nav = NavigationTarget { + file_id, + name, + range: TextRange::offset_len(0.into(), 0.into()), + kind: MODULE, + ptr: None, + }; + return Ok(Some(vec![nav])); + } + } + } + Ok(None) +} + +#[cfg(test)] +mod tests { + use test_utils::assert_eq_dbg; + use crate::mock_analysis::analysis_and_position; + + #[test] + fn goto_defenition_works_in_items() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + struct Foo; + enum E { X(Foo<|>) } + ", + ); + + let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); + assert_eq_dbg( + r#"[NavigationTarget { file_id: FileId(1), name: "Foo", + kind: STRUCT_DEF, range: [0; 11), + ptr: Some(LocalSyntaxPtr { range: [0; 11), kind: STRUCT_DEF }) }]"#, + &symbols, + ); + } + + #[test] + fn goto_defenition_works_for_module_declaration() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod <|>foo; + //- /foo.rs + // empty + ", + ); + + let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); + assert_eq_dbg( + r#"[NavigationTarget 
{ file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, + &symbols, + ); + + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod <|>foo; + //- /foo/mod.rs + // empty + ", + ); + + let symbols = analysis.goto_defenition(pos).unwrap().unwrap(); + assert_eq_dbg( + r#"[NavigationTarget { file_id: FileId(2), name: "foo", kind: MODULE, range: [0; 0), ptr: None }]"#, + &symbols, + ); + } +} diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs new file mode 100644 index 000000000..475524ee1 --- /dev/null +++ b/crates/ra_ide_api/src/hover.rs @@ -0,0 +1,257 @@ +use ra_db::{Cancelable, SyntaxDatabase}; +use ra_syntax::{ + AstNode, SyntaxNode, TreePtr, + ast::{self, NameOwner}, + algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, +}; + +use crate::{db::RootDatabase, RangeInfo, FilePosition, FileRange, NavigationTarget}; + +pub(crate) fn hover( + db: &RootDatabase, + position: FilePosition, +) -> Cancelable>> { + let file = db.source_file(position.file_id); + let mut res = Vec::new(); + + let mut range = None; + if let Some(name_ref) = find_node_at_offset::(file.syntax(), position.offset) { + let navs = crate::goto_defenition::reference_defenition(db, position.file_id, name_ref)?; + for nav in navs { + res.extend(doc_text_for(db, nav)?) 
+ } + if !res.is_empty() { + range = Some(name_ref.syntax().range()) + } + } + if range.is_none() { + let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| { + leaf.ancestors() + .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) + }); + let node = ctry!(node); + let frange = FileRange { + file_id: position.file_id, + range: node.range(), + }; + res.extend(type_of(db, frange)?); + range = Some(node.range()); + }; + + let range = ctry!(range); + if res.is_empty() { + return Ok(None); + } + let res = RangeInfo::new(range, res.join("\n\n---\n")); + Ok(Some(res)) +} + +pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Cancelable> { + let file = db.source_file(frange.file_id); + let syntax = file.syntax(); + let leaf_node = find_covering_node(syntax, frange.range); + // if we picked identifier, expand to pattern/expression + let node = leaf_node + .ancestors() + .take_while(|it| it.range() == leaf_node.range()) + .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some()) + .unwrap_or(leaf_node); + let parent_fn = ctry!(node.ancestors().find_map(ast::FnDef::cast)); + let function = ctry!(hir::source_binder::function_from_source( + db, + frange.file_id, + parent_fn + )?); + let infer = function.infer(db)?; + let syntax_mapping = function.body_syntax_mapping(db)?; + if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) { + Ok(Some(infer[expr].to_string())) + } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) { + Ok(Some(infer[pat].to_string())) + } else { + Ok(None) + } +} + +// FIXME: this should not really use navigation target. Rather, approximatelly +// resovled symbol should return a `DefId`. 
+fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Cancelable> { + let result = match (nav.description(db), nav.docs(db)) { + (Some(desc), Some(docs)) => Some("```rust\n".to_string() + &*desc + "\n```\n\n" + &*docs), + (Some(desc), None) => Some("```rust\n".to_string() + &*desc + "\n```"), + (None, Some(docs)) => Some(docs), + _ => None, + }; + + Ok(result) +} + +impl NavigationTarget { + fn node(&self, db: &RootDatabase) -> Option> { + let source_file = db.source_file(self.file_id); + let source_file = source_file.syntax(); + let node = source_file + .descendants() + .find(|node| node.kind() == self.kind && node.range() == self.range)? + .to_owned(); + Some(node) + } + + fn docs(&self, db: &RootDatabase) -> Option { + let node = self.node(db)?; + fn doc_comments(node: &N) -> Option { + let comments = node.doc_comment_text(); + if comments.is_empty() { + None + } else { + Some(comments) + } + } + + visitor() + .visit(doc_comments::) + .visit(doc_comments::) + .visit(doc_comments::) + .visit(doc_comments::) + .visit(doc_comments::) + .visit(doc_comments::) + .visit(doc_comments::) + .visit(doc_comments::) + .accept(&node)? + } + + /// Get a description of this node. + /// + /// e.g. 
`struct Name`, `enum Name`, `fn Name` + fn description(&self, db: &RootDatabase) -> Option { + // TODO: After type inference is done, add type information to improve the output + let node = self.node(db)?; + // TODO: Refactor to be have less repetition + visitor() + .visit(|node: &ast::FnDef| { + let mut string = "fn ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::StructDef| { + let mut string = "struct ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::EnumDef| { + let mut string = "enum ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::TraitDef| { + let mut string = "trait ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::Module| { + let mut string = "mod ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::TypeDef| { + let mut string = "type ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::ConstDef| { + let mut string = "const ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .visit(|node: &ast::StaticDef| { + let mut string = "static ".to_string(); + node.name()?.syntax().text().push_to(&mut string); + Some(string) + }) + .accept(&node)? 
+ } +} + +#[cfg(test)] +mod tests { + use ra_syntax::TextRange; + use crate::mock_analysis::{single_file_with_position, single_file_with_range}; + + #[test] + fn hover_shows_type_of_an_expression() { + let (analysis, position) = single_file_with_position( + " + pub fn foo() -> u32 { 1 } + + fn main() { + let foo_test = foo()<|>; + } + ", + ); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(hover.range, TextRange::from_to(95.into(), 100.into())); + assert_eq!(hover.info, "u32"); + } + + #[test] + fn hover_for_local_variable() { + let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }"); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(hover.info, "i32"); + } + + #[test] + fn hover_for_local_variable_pat() { + let (analysis, position) = single_file_with_position("fn func(fo<|>o: i32) {}"); + let hover = analysis.hover(position).unwrap().unwrap(); + assert_eq!(hover.info, "i32"); + } + + #[test] + fn test_type_of_for_function() { + let (analysis, range) = single_file_with_range( + " + pub fn foo() -> u32 { 1 }; + + fn main() { + let foo_test = <|>foo()<|>; + } + ", + ); + + let type_name = analysis.type_of(range).unwrap().unwrap(); + assert_eq!("u32", &type_name); + } + + // FIXME: improve type_of to make this work + #[test] + fn test_type_of_for_expr_1() { + let (analysis, range) = single_file_with_range( + " + fn main() { + let foo = <|>1 + foo_test<|>; + } + ", + ); + + let type_name = analysis.type_of(range).unwrap().unwrap(); + assert_eq!("[unknown]", &type_name); + } + + // FIXME: improve type_of to make this work + #[test] + fn test_type_of_for_expr_2() { + let (analysis, range) = single_file_with_range( + " + fn main() { + let foo: usize = 1; + let bar = <|>1 + foo_test<|>; + } + ", + ); + + let type_name = analysis.type_of(range).unwrap().unwrap(); + assert_eq!("[unknown]", &type_name); + } + +} diff --git a/crates/ra_ide_api/src/imp.rs b/crates/ra_ide_api/src/imp.rs new file mode 
100644 index 000000000..7c60ab7d6 --- /dev/null +++ b/crates/ra_ide_api/src/imp.rs @@ -0,0 +1,309 @@ +use std::sync::Arc; + +use salsa::Database; + +use hir::{ + self, Problem, source_binder, +}; +use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase}; +use ra_ide_api_light::{self, assists, LocalEdit, Severity}; +use ra_syntax::{ + TextRange, AstNode, SourceFile, + ast::{self, NameOwner}, + algo::find_node_at_offset, + SyntaxKind::*, +}; + +use crate::{ + AnalysisChange, + Cancelable, NavigationTarget, + CrateId, db, Diagnostic, FileId, FilePosition, FileRange, FileSystemEdit, + Query, RootChange, SourceChange, SourceFileEdit, + symbol_index::{LibrarySymbolsQuery, FileSymbol}, +}; + +impl db::RootDatabase { + pub(crate) fn apply_change(&mut self, change: AnalysisChange) { + log::info!("apply_change {:?}", change); + // self.gc_syntax_trees(); + if !change.new_roots.is_empty() { + let mut local_roots = Vec::clone(&self.local_roots()); + for (root_id, is_local) in change.new_roots { + self.query_mut(ra_db::SourceRootQuery) + .set(root_id, Default::default()); + if is_local { + local_roots.push(root_id); + } + } + self.query_mut(ra_db::LocalRootsQuery) + .set((), Arc::new(local_roots)); + } + + for (root_id, root_change) in change.roots_changed { + self.apply_root_change(root_id, root_change); + } + for (file_id, text) in change.files_changed { + self.query_mut(ra_db::FileTextQuery).set(file_id, text) + } + if !change.libraries_added.is_empty() { + let mut libraries = Vec::clone(&self.library_roots()); + for library in change.libraries_added { + libraries.push(library.root_id); + self.query_mut(ra_db::SourceRootQuery) + .set(library.root_id, Default::default()); + self.query_mut(LibrarySymbolsQuery) + .set_constant(library.root_id, Arc::new(library.symbol_index)); + self.apply_root_change(library.root_id, library.root_change); + } + self.query_mut(ra_db::LibraryRootsQuery) + .set((), Arc::new(libraries)); + } + if let Some(crate_graph) = 
change.crate_graph { + self.query_mut(ra_db::CrateGraphQuery) + .set((), Arc::new(crate_graph)) + } + } + + fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) { + let mut source_root = SourceRoot::clone(&self.source_root(root_id)); + for add_file in root_change.added { + self.query_mut(ra_db::FileTextQuery) + .set(add_file.file_id, add_file.text); + self.query_mut(ra_db::FileRelativePathQuery) + .set(add_file.file_id, add_file.path.clone()); + self.query_mut(ra_db::FileSourceRootQuery) + .set(add_file.file_id, root_id); + source_root.files.insert(add_file.path, add_file.file_id); + } + for remove_file in root_change.removed { + self.query_mut(ra_db::FileTextQuery) + .set(remove_file.file_id, Default::default()); + source_root.files.remove(&remove_file.path); + } + self.query_mut(ra_db::SourceRootQuery) + .set(root_id, Arc::new(source_root)); + } + + #[allow(unused)] + /// Ideally, we should call this function from time to time to collect heavy + /// syntax trees. However, if we actually do that, everything is recomputed + /// for some reason. Needs investigation. + fn gc_syntax_trees(&mut self) { + self.query(ra_db::SourceFileQuery) + .sweep(salsa::SweepStrategy::default().discard_values()); + self.query(hir::db::SourceFileItemsQuery) + .sweep(salsa::SweepStrategy::default().discard_values()); + self.query(hir::db::FileItemQuery) + .sweep(salsa::SweepStrategy::default().discard_values()); + } +} + +impl db::RootDatabase { + /// This returns `Vec` because a module may be included from several places. We + /// don't handle this case yet though, so the Vec has length at most one. + pub(crate) fn parent_module( + &self, + position: FilePosition, + ) -> Cancelable> { + let module = match source_binder::module_from_position(self, position)? { + None => return Ok(Vec::new()), + Some(it) => it, + }; + let (file_id, ast_module) = match module.declaration_source(self)? 
{ + None => return Ok(Vec::new()), + Some(it) => it, + }; + let name = ast_module.name().unwrap(); + Ok(vec![NavigationTarget { + file_id, + name: name.text().clone(), + range: name.syntax().range(), + kind: MODULE, + ptr: None, + }]) + } + /// Returns `Vec` for the same reason as `parent_module` + pub(crate) fn crate_for(&self, file_id: FileId) -> Cancelable> { + let module = match source_binder::module_from_file_id(self, file_id)? { + Some(it) => it, + None => return Ok(Vec::new()), + }; + let krate = match module.krate(self)? { + Some(it) => it, + None => return Ok(Vec::new()), + }; + Ok(vec![krate.crate_id()]) + } + pub(crate) fn find_all_refs( + &self, + position: FilePosition, + ) -> Cancelable> { + let file = self.source_file(position.file_id); + // Find the binding associated with the offset + let (binding, descr) = match find_binding(self, &file, position)? { + None => return Ok(Vec::new()), + Some(it) => it, + }; + + let mut ret = binding + .name() + .into_iter() + .map(|name| (position.file_id, name.syntax().range())) + .collect::>(); + ret.extend( + descr + .scopes(self)? 
+ .find_all_refs(binding) + .into_iter() + .map(|ref_desc| (position.file_id, ref_desc.range)), + ); + + return Ok(ret); + + fn find_binding<'a>( + db: &db::RootDatabase, + source_file: &'a SourceFile, + position: FilePosition, + ) -> Cancelable> { + let syntax = source_file.syntax(); + if let Some(binding) = find_node_at_offset::(syntax, position.offset) { + let descr = ctry!(source_binder::function_from_child_node( + db, + position.file_id, + binding.syntax(), + )?); + return Ok(Some((binding, descr))); + }; + let name_ref = ctry!(find_node_at_offset::(syntax, position.offset)); + let descr = ctry!(source_binder::function_from_child_node( + db, + position.file_id, + name_ref.syntax(), + )?); + let scope = descr.scopes(db)?; + let resolved = ctry!(scope.resolve_local_name(name_ref)); + let resolved = resolved.ptr().resolve(source_file); + let binding = ctry!(find_node_at_offset::( + syntax, + resolved.range().end() + )); + Ok(Some((binding, descr))) + } + } + + pub(crate) fn diagnostics(&self, file_id: FileId) -> Cancelable> { + let syntax = self.source_file(file_id); + + let mut res = ra_ide_api_light::diagnostics(&syntax) + .into_iter() + .map(|d| Diagnostic { + range: d.range, + message: d.msg, + severity: d.severity, + fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)), + }) + .collect::>(); + if let Some(m) = source_binder::module_from_file_id(self, file_id)? { + for (name_node, problem) in m.problems(self)? 
{ + let source_root = self.file_source_root(file_id); + let diag = match problem { + Problem::UnresolvedModule { candidate } => { + let create_file = FileSystemEdit::CreateFile { + source_root, + path: candidate.clone(), + }; + let fix = SourceChange { + label: "create module".to_string(), + source_file_edits: Vec::new(), + file_system_edits: vec![create_file], + cursor_position: None, + }; + Diagnostic { + range: name_node.range(), + message: "unresolved module".to_string(), + severity: Severity::Error, + fix: Some(fix), + } + } + Problem::NotDirOwner { move_to, candidate } => { + let move_file = FileSystemEdit::MoveFile { + src: file_id, + dst_source_root: source_root, + dst_path: move_to.clone(), + }; + let create_file = FileSystemEdit::CreateFile { + source_root, + path: move_to.join(candidate), + }; + let fix = SourceChange { + label: "move file and create module".to_string(), + source_file_edits: Vec::new(), + file_system_edits: vec![move_file, create_file], + cursor_position: None, + }; + Diagnostic { + range: name_node.range(), + message: "can't declare module at this location".to_string(), + severity: Severity::Error, + fix: Some(fix), + } + } + }; + res.push(diag) + } + }; + Ok(res) + } + + pub(crate) fn assists(&self, frange: FileRange) -> Vec { + let file = self.source_file(frange.file_id); + assists::assists(&file, frange.range) + .into_iter() + .map(|local_edit| SourceChange::from_local_edit(frange.file_id, local_edit)) + .collect() + } + + pub(crate) fn rename( + &self, + position: FilePosition, + new_name: &str, + ) -> Cancelable> { + let res = self + .find_all_refs(position)? 
+ .iter() + .map(|(file_id, text_range)| SourceFileEdit { + file_id: *file_id, + edit: { + let mut builder = ra_text_edit::TextEditBuilder::default(); + builder.replace(*text_range, new_name.into()); + builder.finish() + }, + }) + .collect::>(); + Ok(res) + } + pub(crate) fn index_resolve(&self, name_ref: &ast::NameRef) -> Cancelable> { + let name = name_ref.text(); + let mut query = Query::new(name.to_string()); + query.exact(); + query.limit(4); + crate::symbol_index::world_symbols(self, query) + } +} + +impl SourceChange { + pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange { + let file_edit = SourceFileEdit { + file_id, + edit: edit.edit, + }; + SourceChange { + label: edit.label, + source_file_edits: vec![file_edit], + file_system_edits: vec![], + cursor_position: edit + .cursor_position + .map(|offset| FilePosition { offset, file_id }), + } + } +} diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs new file mode 100644 index 000000000..183e36706 --- /dev/null +++ b/crates/ra_ide_api/src/lib.rs @@ -0,0 +1,509 @@ +//! ra_analyzer crate provides "ide-centric" APIs for the rust-analyzer. What +//! powers this API are the `RootDatabase` struct, which defines a `salsa` +//! database, and the `ra_hir` crate, where majority of the analysis happens. +//! However, IDE specific bits of the analysis (most notably completion) happen +//! in this crate. +macro_rules! 
ctry { + ($expr:expr) => { + match $expr { + None => return Ok(None), + Some(it) => it, + } + }; +} + +mod completion; +mod db; +mod goto_defenition; +mod imp; +pub mod mock_analysis; +mod runnables; +mod symbol_index; + +mod extend_selection; +mod hover; +mod call_info; +mod syntax_highlighting; + +use std::{fmt, sync::Arc}; + +use ra_syntax::{SmolStr, SourceFile, TreePtr, SyntaxKind, TextRange, TextUnit}; +use ra_text_edit::TextEdit; +use ra_db::{SyntaxDatabase, FilesDatabase, LocalSyntaxPtr}; +use rayon::prelude::*; +use relative_path::RelativePathBuf; +use rustc_hash::FxHashMap; +use salsa::ParallelDatabase; + +use crate::{ + symbol_index::{FileSymbol, SymbolIndex}, + db::LineIndexDatabase, +}; + +pub use crate::{ + completion::{CompletionItem, CompletionItemKind, InsertText}, + runnables::{Runnable, RunnableKind}, +}; +pub use ra_ide_api_light::{ + Fold, FoldKind, HighlightedRange, Severity, StructureNode, + LineIndex, LineCol, translate_offset_with_edit, +}; +pub use ra_db::{ + Cancelable, Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId +}; + +#[derive(Default)] +pub struct AnalysisChange { + new_roots: Vec<(SourceRootId, bool)>, + roots_changed: FxHashMap, + files_changed: Vec<(FileId, Arc)>, + libraries_added: Vec, + crate_graph: Option, +} + +#[derive(Default)] +struct RootChange { + added: Vec, + removed: Vec, +} + +#[derive(Debug)] +struct AddFile { + file_id: FileId, + path: RelativePathBuf, + text: Arc, +} + +#[derive(Debug)] +struct RemoveFile { + file_id: FileId, + path: RelativePathBuf, +} + +impl fmt::Debug for AnalysisChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + let mut d = fmt.debug_struct("AnalysisChange"); + if !self.new_roots.is_empty() { + d.field("new_roots", &self.new_roots); + } + if !self.roots_changed.is_empty() { + d.field("roots_changed", &self.roots_changed); + } + if !self.files_changed.is_empty() { + d.field("files_changed", &self.files_changed.len()); + } + if 
!self.libraries_added.is_empty() { + d.field("libraries_added", &self.libraries_added.len()); + } + if !self.crate_graph.is_some() { + d.field("crate_graph", &self.crate_graph); + } + d.finish() + } +} + +impl fmt::Debug for RootChange { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_struct("AnalysisChange") + .field("added", &self.added.len()) + .field("removed", &self.removed.len()) + .finish() + } +} + +impl AnalysisChange { + pub fn new() -> AnalysisChange { + AnalysisChange::default() + } + pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { + self.new_roots.push((root_id, is_local)); + } + pub fn add_file( + &mut self, + root_id: SourceRootId, + file_id: FileId, + path: RelativePathBuf, + text: Arc, + ) { + let file = AddFile { + file_id, + path, + text, + }; + self.roots_changed + .entry(root_id) + .or_default() + .added + .push(file); + } + pub fn change_file(&mut self, file_id: FileId, new_text: Arc) { + self.files_changed.push((file_id, new_text)) + } + pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) { + let file = RemoveFile { file_id, path }; + self.roots_changed + .entry(root_id) + .or_default() + .removed + .push(file); + } + pub fn add_library(&mut self, data: LibraryData) { + self.libraries_added.push(data) + } + pub fn set_crate_graph(&mut self, graph: CrateGraph) { + self.crate_graph = Some(graph); + } +} + +#[derive(Debug)] +pub struct SourceChange { + pub label: String, + pub source_file_edits: Vec, + pub file_system_edits: Vec, + pub cursor_position: Option, +} + +#[derive(Debug)] +pub struct SourceFileEdit { + pub file_id: FileId, + pub edit: TextEdit, +} + +#[derive(Debug)] +pub enum FileSystemEdit { + CreateFile { + source_root: SourceRootId, + path: RelativePathBuf, + }, + MoveFile { + src: FileId, + dst_source_root: SourceRootId, + dst_path: RelativePathBuf, + }, +} + +#[derive(Debug)] +pub struct Diagnostic { + pub message: String, + pub range: TextRange, 
+ pub fix: Option, + pub severity: Severity, +} + +#[derive(Debug)] +pub struct Query { + query: String, + lowercased: String, + only_types: bool, + libs: bool, + exact: bool, + limit: usize, +} + +impl Query { + pub fn new(query: String) -> Query { + let lowercased = query.to_lowercase(); + Query { + query, + lowercased, + only_types: false, + libs: false, + exact: false, + limit: usize::max_value(), + } + } + pub fn only_types(&mut self) { + self.only_types = true; + } + pub fn libs(&mut self) { + self.libs = true; + } + pub fn exact(&mut self) { + self.exact = true; + } + pub fn limit(&mut self, limit: usize) { + self.limit = limit + } +} + +/// `NavigationTarget` represents and element in the editor's UI whihc you can +/// click on to navigate to a particular piece of code. +/// +/// Typically, a `NavigationTarget` corresponds to some element in the source +/// code, like a function or a struct, but this is not strictly required. +#[derive(Debug, Clone)] +pub struct NavigationTarget { + file_id: FileId, + name: SmolStr, + kind: SyntaxKind, + range: TextRange, + // Should be DefId ideally + ptr: Option, +} + +impl NavigationTarget { + fn from_symbol(symbol: FileSymbol) -> NavigationTarget { + NavigationTarget { + file_id: symbol.file_id, + name: symbol.name.clone(), + kind: symbol.ptr.kind(), + range: symbol.ptr.range(), + ptr: Some(symbol.ptr.clone()), + } + } + pub fn name(&self) -> &SmolStr { + &self.name + } + pub fn kind(&self) -> SyntaxKind { + self.kind + } + pub fn file_id(&self) -> FileId { + self.file_id + } + pub fn range(&self) -> TextRange { + self.range + } +} + +#[derive(Debug)] +pub struct RangeInfo { + pub range: TextRange, + pub info: T, +} + +impl RangeInfo { + fn new(range: TextRange, info: T) -> RangeInfo { + RangeInfo { range, info } + } +} + +#[derive(Debug)] +pub struct CallInfo { + pub label: String, + pub doc: Option, + pub parameters: Vec, + pub active_parameter: Option, +} + +/// `AnalysisHost` stores the current state of the world. 
+#[derive(Debug, Default)] +pub struct AnalysisHost { + db: db::RootDatabase, +} + +impl AnalysisHost { + /// Returns a snapshot of the current state, which you can query for + /// semantic information. + pub fn analysis(&self) -> Analysis { + Analysis { + db: self.db.snapshot(), + } + } + /// Applies changes to the current state of the world. If there are + /// outstanding snapshots, they will be canceled. + pub fn apply_change(&mut self, change: AnalysisChange) { + self.db.apply_change(change) + } +} + +/// Analysis is a snapshot of a world state at a moment in time. It is the main +/// entry point for asking semantic information about the world. When the world +/// state is advanced using `AnalysisHost::apply_change` method, all existing +/// `Analysis` are canceled (most method return `Err(Canceled)`). +#[derive(Debug)] +pub struct Analysis { + db: salsa::Snapshot, +} + +impl Analysis { + /// Gets the text of the source file. + pub fn file_text(&self, file_id: FileId) -> Arc { + self.db.file_text(file_id) + } + /// Gets the syntax tree of the file. + pub fn file_syntax(&self, file_id: FileId) -> TreePtr { + self.db.source_file(file_id).clone() + } + /// Gets the file's `LineIndex`: data structure to convert between absolute + /// offsets and line/column representation. + pub fn file_line_index(&self, file_id: FileId) -> Arc { + self.db.line_index(file_id) + } + /// Selects the next syntactic nodes encopasing the range. + pub fn extend_selection(&self, frange: FileRange) -> TextRange { + extend_selection::extend_selection(&self.db, frange) + } + /// Returns position of the mathcing brace (all types of braces are + /// supported). + pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option { + ra_ide_api_light::matching_brace(file, offset) + } + /// Returns a syntax tree represented as `String`, for debug purposes. + // FIXME: use a better name here. 
+ pub fn syntax_tree(&self, file_id: FileId) -> String { + let file = self.db.source_file(file_id); + ra_ide_api_light::syntax_tree(&file) + } + /// Returns an edit to remove all newlines in the range, cleaning up minor + /// stuff like trailing commas. + pub fn join_lines(&self, frange: FileRange) -> SourceChange { + let file = self.db.source_file(frange.file_id); + SourceChange::from_local_edit( + frange.file_id, + ra_ide_api_light::join_lines(&file, frange.range), + ) + } + /// Returns an edit which should be applied when opening a new line, fixing + /// up minor stuff like continuing the comment. + pub fn on_enter(&self, position: FilePosition) -> Option { + let file = self.db.source_file(position.file_id); + let edit = ra_ide_api_light::on_enter(&file, position.offset)?; + Some(SourceChange::from_local_edit(position.file_id, edit)) + } + /// Returns an edit which should be applied after `=` was typed. Primarily, + /// this works when adding `let =`. + // FIXME: use a snippet completion instead of this hack here. + pub fn on_eq_typed(&self, position: FilePosition) -> Option { + let file = self.db.source_file(position.file_id); + let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?; + Some(SourceChange::from_local_edit(position.file_id, edit)) + } + /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately. + pub fn on_dot_typed(&self, position: FilePosition) -> Option { + let file = self.db.source_file(position.file_id); + let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?; + Some(SourceChange::from_local_edit(position.file_id, edit)) + } + /// Returns a tree representation of symbols in the file. Useful to draw a + /// file outline. + pub fn file_structure(&self, file_id: FileId) -> Vec { + let file = self.db.source_file(file_id); + ra_ide_api_light::file_structure(&file) + } + /// Returns the set of folding ranges. 
+ pub fn folding_ranges(&self, file_id: FileId) -> Vec { + let file = self.db.source_file(file_id); + ra_ide_api_light::folding_ranges(&file) + } + /// Fuzzy searches for a symbol. + pub fn symbol_search(&self, query: Query) -> Cancelable> { + let res = symbol_index::world_symbols(&*self.db, query)? + .into_iter() + .map(NavigationTarget::from_symbol) + .collect(); + Ok(res) + } + pub fn goto_defenition( + &self, + position: FilePosition, + ) -> Cancelable>> { + goto_defenition::goto_defenition(&*self.db, position) + } + /// Finds all usages of the reference at point. + pub fn find_all_refs(&self, position: FilePosition) -> Cancelable> { + self.db.find_all_refs(position) + } + /// Returns a short text descrbing element at position. + pub fn hover(&self, position: FilePosition) -> Cancelable>> { + hover::hover(&*self.db, position) + } + /// Computes parameter information for the given call expression. + pub fn call_info(&self, position: FilePosition) -> Cancelable> { + call_info::call_info(&*self.db, position) + } + /// Returns a `mod name;` declaration which created the current module. + pub fn parent_module(&self, position: FilePosition) -> Cancelable> { + self.db.parent_module(position) + } + /// Returns crates this file belongs too. + pub fn crate_for(&self, file_id: FileId) -> Cancelable> { + self.db.crate_for(file_id) + } + /// Returns the root file of the given crate. + pub fn crate_root(&self, crate_id: CrateId) -> Cancelable { + Ok(self.db.crate_graph().crate_root(crate_id)) + } + /// Returns the set of possible targets to run for the current file. + pub fn runnables(&self, file_id: FileId) -> Cancelable> { + runnables::runnables(&*self.db, file_id) + } + /// Computes syntax highlighting for the given file. + pub fn highlight(&self, file_id: FileId) -> Cancelable> { + syntax_highlighting::highlight(&*self.db, file_id) + } + /// Computes completions at the given position. 
+ pub fn completions(&self, position: FilePosition) -> Cancelable>> { + let completions = completion::completions(&self.db, position)?; + Ok(completions.map(|it| it.into())) + } + /// Computes assists (aks code actons aka intentions) for the given + /// position. + pub fn assists(&self, frange: FileRange) -> Cancelable> { + Ok(self.db.assists(frange)) + } + /// Computes the set of diagnostics for the given file. + pub fn diagnostics(&self, file_id: FileId) -> Cancelable> { + self.db.diagnostics(file_id) + } + /// Computes the type of the expression at the given position. + pub fn type_of(&self, frange: FileRange) -> Cancelable> { + hover::type_of(&*self.db, frange) + } + /// Returns the edit required to rename reference at the position to the new + /// name. + pub fn rename( + &self, + position: FilePosition, + new_name: &str, + ) -> Cancelable> { + self.db.rename(position, new_name) + } +} + +pub struct LibraryData { + root_id: SourceRootId, + root_change: RootChange, + symbol_index: SymbolIndex, +} + +impl fmt::Debug for LibraryData { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("LibraryData") + .field("root_id", &self.root_id) + .field("root_change", &self.root_change) + .field("n_symbols", &self.symbol_index.len()) + .finish() + } +} + +impl LibraryData { + pub fn prepare( + root_id: SourceRootId, + files: Vec<(FileId, RelativePathBuf, Arc)>, + ) -> LibraryData { + let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { + let file = SourceFile::parse(text); + (*file_id, file) + })); + let mut root_change = RootChange::default(); + root_change.added = files + .into_iter() + .map(|(file_id, path, text)| AddFile { + file_id, + path, + text, + }) + .collect(); + LibraryData { + root_id, + root_change, + symbol_index, + } + } +} + +#[test] +fn analysis_is_send() { + fn is_send() {} + is_send::(); +} diff --git a/crates/ra_ide_api/src/mock_analysis.rs b/crates/ra_ide_api/src/mock_analysis.rs new file mode 
100644 index 000000000..846c76cfe --- /dev/null +++ b/crates/ra_ide_api/src/mock_analysis.rs @@ -0,0 +1,135 @@ +use std::sync::Arc; + +use relative_path::RelativePathBuf; +use test_utils::{extract_offset, extract_range, parse_fixture, CURSOR_MARKER}; +use ra_db::mock::FileMap; + +use crate::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, FilePosition, FileRange, SourceRootId}; + +/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis +/// from a set of in-memory files. +#[derive(Debug, Default)] +pub struct MockAnalysis { + files: Vec<(String, String)>, +} + +impl MockAnalysis { + pub fn new() -> MockAnalysis { + MockAnalysis::default() + } + /// Creates `MockAnalysis` using a fixture data in the following format: + /// + /// ```notrust + /// //- /main.rs + /// mod foo; + /// fn main() {} + /// + /// //- /foo.rs + /// struct Baz; + /// ``` + pub fn with_files(fixture: &str) -> MockAnalysis { + let mut res = MockAnalysis::new(); + for entry in parse_fixture(fixture) { + res.add_file(&entry.meta, &entry.text); + } + res + } + + /// Same as `with_files`, but requires that a single file contains a `<|>` marker, + /// whose position is also returned. 
+ pub fn with_files_and_position(fixture: &str) -> (MockAnalysis, FilePosition) { + let mut position = None; + let mut res = MockAnalysis::new(); + for entry in parse_fixture(fixture) { + if entry.text.contains(CURSOR_MARKER) { + assert!( + position.is_none(), + "only one marker (<|>) per fixture is allowed" + ); + position = Some(res.add_file_with_position(&entry.meta, &entry.text)); + } else { + res.add_file(&entry.meta, &entry.text); + } + } + let position = position.expect("expected a marker (<|>)"); + (res, position) + } + + pub fn add_file(&mut self, path: &str, text: &str) -> FileId { + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push((path.to_string(), text.to_string())); + file_id + } + pub fn add_file_with_position(&mut self, path: &str, text: &str) -> FilePosition { + let (offset, text) = extract_offset(text); + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push((path.to_string(), text.to_string())); + FilePosition { file_id, offset } + } + pub fn add_file_with_range(&mut self, path: &str, text: &str) -> FileRange { + let (range, text) = extract_range(text); + let file_id = FileId((self.files.len() + 1) as u32); + self.files.push((path.to_string(), text.to_string())); + FileRange { file_id, range } + } + pub fn id_of(&self, path: &str) -> FileId { + let (idx, _) = self + .files + .iter() + .enumerate() + .find(|(_, (p, _text))| path == p) + .expect("no file in this mock"); + FileId(idx as u32 + 1) + } + pub fn analysis_host(self) -> AnalysisHost { + let mut host = AnalysisHost::default(); + let mut file_map = FileMap::default(); + let source_root = SourceRootId(0); + let mut change = AnalysisChange::new(); + change.add_root(source_root, true); + let mut crate_graph = CrateGraph::default(); + for (path, contents) in self.files.into_iter() { + assert!(path.starts_with('/')); + let path = RelativePathBuf::from_path(&path[1..]).unwrap(); + let file_id = file_map.add(path.clone()); + if path == "/lib.rs" || path == 
"/main.rs" { + crate_graph.add_crate_root(file_id); + } + change.add_file(source_root, file_id, path, Arc::new(contents)); + } + change.set_crate_graph(crate_graph); + // change.set_file_resolver(Arc::new(file_map)); + host.apply_change(change); + host + } + pub fn analysis(self) -> Analysis { + self.analysis_host().analysis() + } +} + +/// Creates analysis from a multi-file fixture, returns positions marked with <|>. +pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) { + let (mock, position) = MockAnalysis::with_files_and_position(fixture); + (mock.analysis(), position) +} + +/// Creates analysis for a single file. +pub fn single_file(code: &str) -> (Analysis, FileId) { + let mut mock = MockAnalysis::new(); + let file_id = mock.add_file("/main.rs", code); + (mock.analysis(), file_id) +} + +/// Creates analysis for a single file, returns position marked with <|>. +pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) { + let mut mock = MockAnalysis::new(); + let pos = mock.add_file_with_position("/main.rs", code); + (mock.analysis(), pos) +} + +/// Creates analysis for a single file, returns range marked with a pair of <|>. 
+pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) { + let mut mock = MockAnalysis::new(); + let pos = mock.add_file_with_range("/main.rs", code); + (mock.analysis(), pos) +} diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs new file mode 100644 index 000000000..98b1d2d55 --- /dev/null +++ b/crates/ra_ide_api/src/runnables.rs @@ -0,0 +1,89 @@ +use itertools::Itertools; +use ra_syntax::{ + TextRange, SyntaxNode, + ast::{self, AstNode, NameOwner, ModuleItemOwner}, +}; +use ra_db::{Cancelable, SyntaxDatabase}; + +use crate::{db::RootDatabase, FileId}; + +#[derive(Debug)] +pub struct Runnable { + pub range: TextRange, + pub kind: RunnableKind, +} + +#[derive(Debug)] +pub enum RunnableKind { + Test { name: String }, + TestMod { path: String }, + Bin, +} + +pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable> { + let source_file = db.source_file(file_id); + let res = source_file + .syntax() + .descendants() + .filter_map(|i| runnable(db, file_id, i)) + .collect(); + Ok(res) +} + +fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option { + if let Some(fn_def) = ast::FnDef::cast(item) { + runnable_fn(fn_def) + } else if let Some(m) = ast::Module::cast(item) { + runnable_mod(db, file_id, m) + } else { + None + } +} + +fn runnable_fn(fn_def: &ast::FnDef) -> Option { + let name = fn_def.name()?.text(); + let kind = if name == "main" { + RunnableKind::Bin + } else if fn_def.has_atom_attr("test") { + RunnableKind::Test { + name: name.to_string(), + } + } else { + return None; + }; + Some(Runnable { + range: fn_def.syntax().range(), + kind, + }) +} + +fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option { + let has_test_function = module + .item_list()? 
+ .items() + .filter_map(|it| match it.kind() { + ast::ModuleItemKind::FnDef(it) => Some(it), + _ => None, + }) + .any(|f| f.has_atom_attr("test")); + if !has_test_function { + return None; + } + let range = module.syntax().range(); + let module = + hir::source_binder::module_from_child_node(db, file_id, module.syntax()).ok()??; + + // FIXME: thread cancellation instead of `.ok`ing + let path = module + .path_to_root(db) + .ok()? + .into_iter() + .rev() + .filter_map(|it| it.name(db).ok()) + .filter_map(|it| it) + .join("::"); + Some(Runnable { + range, + kind: RunnableKind::TestMod { path }, + }) +} diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs new file mode 100644 index 000000000..8dd15b40e --- /dev/null +++ b/crates/ra_ide_api/src/symbol_index.rs @@ -0,0 +1,222 @@ +//! This module handles fuzzy-searching of functions, structs and other symbols +//! by name across the whole workspace and dependencies. +//! +//! It works by building an incrementally-updated text-search index of all +//! symbols. The backbone of the index is the **awesome** `fst` crate by +//! @BurntSushi. +//! +//! In a nutshell, you give a set of strings to the `fst`, and it builds a +//! finite state machine describing this set of strings. The strings which +//! could fuzzy-match a pattern can also be described by a finite state machine. +//! What is freakingly cool is that you can now traverse both state machines in +//! lock-step to enumerate the strings which are both in the input set and +//! fuzzy-match the query. Or, more formally, given two languages described by +//! fsts, one can build a product fst which describes the intersection of the +//! languages. +//! +//! `fst` does not support cheap updating of the index, but it supports unioning +//! of state machines. So, to account for changing source code, we build an fst +//! for each library (which is assumed to never change) and an fst for each rust +//! 
file in the current workspace, and run a query against the union of all +//! those fsts. +use std::{ + cmp::Ordering, + hash::{Hash, Hasher}, + sync::Arc, +}; + +use fst::{self, Streamer}; +use ra_syntax::{ + SyntaxNode, SourceFile, SmolStr, TreePtr, AstNode, + algo::{visit::{visitor, Visitor}, find_covering_node}, + SyntaxKind::{self, *}, + ast::{self, NameOwner}, +}; +use ra_db::{SourceRootId, FilesDatabase, LocalSyntaxPtr}; +use salsa::ParallelDatabase; +use rayon::prelude::*; + +use crate::{ + Cancelable, FileId, Query, + db::RootDatabase, +}; + +salsa::query_group! { + pub(crate) trait SymbolsDatabase: hir::db::HirDatabase { + fn file_symbols(file_id: FileId) -> Cancelable> { + type FileSymbolsQuery; + } + fn library_symbols(id: SourceRootId) -> Arc { + type LibrarySymbolsQuery; + storage input; + } + } +} + +fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Cancelable> { + db.check_canceled()?; + let source_file = db.source_file(file_id); + let mut symbols = source_file + .syntax() + .descendants() + .filter_map(to_symbol) + .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) + .collect::>(); + + for (name, text_range) in hir::source_binder::macro_symbols(db, file_id)? 
{ + let node = find_covering_node(source_file.syntax(), text_range); + let ptr = LocalSyntaxPtr::new(node); + symbols.push(FileSymbol { file_id, name, ptr }) + } + + Ok(Arc::new(SymbolIndex::new(symbols))) +} + +pub(crate) fn world_symbols(db: &RootDatabase, query: Query) -> Cancelable> { + /// Need to wrap Snapshot to provide `Clone` impl for `map_with` + struct Snap(salsa::Snapshot); + impl Clone for Snap { + fn clone(&self) -> Snap { + Snap(self.0.snapshot()) + } + } + + let buf: Vec> = if query.libs { + let snap = Snap(db.snapshot()); + db.library_roots() + .par_iter() + .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) + .collect() + } else { + let mut files = Vec::new(); + for &root in db.local_roots().iter() { + let sr = db.source_root(root); + files.extend(sr.files.values().map(|&it| it)) + } + + let snap = Snap(db.snapshot()); + files + .par_iter() + .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) + .filter_map(|it| it.ok()) + .collect() + }; + Ok(query.search(&buf)) +} + +#[derive(Default, Debug)] +pub(crate) struct SymbolIndex { + symbols: Vec, + map: fst::Map, +} + +impl PartialEq for SymbolIndex { + fn eq(&self, other: &SymbolIndex) -> bool { + self.symbols == other.symbols + } +} + +impl Eq for SymbolIndex {} + +impl Hash for SymbolIndex { + fn hash(&self, hasher: &mut H) { + self.symbols.hash(hasher) + } +} + +impl SymbolIndex { + fn new(mut symbols: Vec) -> SymbolIndex { + fn cmp(s1: &FileSymbol, s2: &FileSymbol) -> Ordering { + unicase::Ascii::new(s1.name.as_str()).cmp(&unicase::Ascii::new(s2.name.as_str())) + } + symbols.par_sort_by(cmp); + symbols.dedup_by(|s1, s2| cmp(s1, s2) == Ordering::Equal); + let names = symbols.iter().map(|it| it.name.as_str().to_lowercase()); + let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap(); + SymbolIndex { symbols, map } + } + + pub(crate) fn len(&self) -> usize { + self.symbols.len() + } + + pub(crate) fn for_files( + files: impl ParallelIterator)>, + ) -> SymbolIndex { + let 
symbols = files + .flat_map(|(file_id, file)| { + file.syntax() + .descendants() + .filter_map(to_symbol) + .map(move |(name, ptr)| FileSymbol { name, ptr, file_id }) + .collect::>() + }) + .collect::>(); + SymbolIndex::new(symbols) + } +} + +impl Query { + pub(crate) fn search(self, indices: &[Arc]) -> Vec { + let mut op = fst::map::OpBuilder::new(); + for file_symbols in indices.iter() { + let automaton = fst::automaton::Subsequence::new(&self.lowercased); + op = op.add(file_symbols.map.search(automaton)) + } + let mut stream = op.union(); + let mut res = Vec::new(); + while let Some((_, indexed_values)) = stream.next() { + if res.len() >= self.limit { + break; + } + for indexed_value in indexed_values { + let file_symbols = &indices[indexed_value.index]; + let idx = indexed_value.value as usize; + + let symbol = &file_symbols.symbols[idx]; + if self.only_types && !is_type(symbol.ptr.kind()) { + continue; + } + if self.exact && symbol.name != self.query { + continue; + } + res.push(symbol.clone()); + } + } + res + } +} + +fn is_type(kind: SyntaxKind) -> bool { + match kind { + STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true, + _ => false, + } +} + +/// The actual data that is stored in the index. It should be as compact as +/// possible. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct FileSymbol { + pub(crate) file_id: FileId, + pub(crate) name: SmolStr, + pub(crate) ptr: LocalSyntaxPtr, +} + +fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, LocalSyntaxPtr)> { + fn decl(node: &N) -> Option<(SmolStr, LocalSyntaxPtr)> { + let name = node.name()?.text().clone(); + let ptr = LocalSyntaxPtr::new(node.syntax()); + Some((name, ptr)) + } + visitor() + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .visit(decl::) + .accept(node)? 
+} diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs new file mode 100644 index 000000000..cb19e9515 --- /dev/null +++ b/crates/ra_ide_api/src/syntax_highlighting.rs @@ -0,0 +1,92 @@ +use ra_syntax::{ast, AstNode,}; +use ra_db::SyntaxDatabase; + +use crate::{ + FileId, Cancelable, HighlightedRange, + db::RootDatabase, +}; + +pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable> { + let source_file = db.source_file(file_id); + let mut res = ra_ide_api_light::highlight(source_file.syntax()); + for macro_call in source_file + .syntax() + .descendants() + .filter_map(ast::MacroCall::cast) + { + if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) { + let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax()) + .into_iter() + .filter_map(|r| { + let mapped_range = exp.map_range_back(r.range)?; + let res = HighlightedRange { + range: mapped_range + off, + tag: r.tag, + }; + Some(res) + }); + res.extend(mapped_ranges); + } + } + Ok(res) +} + +#[cfg(test)] +mod tests { + use crate::mock_analysis::single_file; + use test_utils::assert_eq_dbg; + + #[test] + fn highlights_code_inside_macros() { + let (analysis, file_id) = single_file( + " + fn main() { + ctry!({ let x = 92; x}); + vec![{ let x = 92; x}]; + } + ", + ); + let highlights = analysis.highlight(file_id).unwrap(); + assert_eq_dbg( + r#"[HighlightedRange { range: [13; 15), tag: "keyword" }, + HighlightedRange { range: [16; 20), tag: "function" }, + HighlightedRange { range: [41; 46), tag: "macro" }, + HighlightedRange { range: [49; 52), tag: "keyword" }, + HighlightedRange { range: [57; 59), tag: "literal" }, + HighlightedRange { range: [82; 86), tag: "macro" }, + HighlightedRange { range: [89; 92), tag: "keyword" }, + HighlightedRange { range: [97; 99), tag: "literal" }, + HighlightedRange { range: [49; 52), tag: "keyword" }, + HighlightedRange { range: [53; 54), tag: "function" }, + HighlightedRange { range: [57; 59), tag: 
"literal" }, + HighlightedRange { range: [61; 62), tag: "text" }, + HighlightedRange { range: [89; 92), tag: "keyword" }, + HighlightedRange { range: [93; 94), tag: "function" }, + HighlightedRange { range: [97; 99), tag: "literal" }, + HighlightedRange { range: [101; 102), tag: "text" }]"#, + &highlights, + ) + } + + // FIXME: this test is not really necessary: artifact of the inital hacky + // macros implementation. + #[test] + fn highlight_query_group_macro() { + let (analysis, file_id) = single_file( + " + salsa::query_group! { + pub trait HirDatabase: SyntaxDatabase {} + } + ", + ); + let highlights = analysis.highlight(file_id).unwrap(); + assert_eq_dbg( + r#"[HighlightedRange { range: [20; 32), tag: "macro" }, + HighlightedRange { range: [13; 18), tag: "text" }, + HighlightedRange { range: [51; 54), tag: "keyword" }, + HighlightedRange { range: [55; 60), tag: "keyword" }, + HighlightedRange { range: [61; 72), tag: "function" }]"#, + &highlights, + ) + } +} diff --git a/crates/ra_ide_api/tests/test/main.rs b/crates/ra_ide_api/tests/test/main.rs new file mode 100644 index 000000000..d1dc07e5b --- /dev/null +++ b/crates/ra_ide_api/tests/test/main.rs @@ -0,0 +1,249 @@ +mod runnables; + +use ra_syntax::TextRange; +use test_utils::{assert_eq_dbg, assert_eq_text}; + +use ra_ide_api::{ + mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis}, + AnalysisChange, CrateGraph, FileId, Query +}; + +#[test] +fn test_unresolved_module_diagnostic() { + let (analysis, file_id) = single_file("mod foo;"); + let diagnostics = analysis.diagnostics(file_id).unwrap(); + assert_eq_dbg( + r#"[Diagnostic { + message: "unresolved module", + range: [4; 7), + fix: Some(SourceChange { + label: "create module", + source_file_edits: [], + file_system_edits: [CreateFile { source_root: SourceRootId(0), path: "foo.rs" }], + cursor_position: None }), + severity: Error }]"#, + &diagnostics, + ); +} + +// FIXME: move this test to hir +#[test] +fn 
test_unresolved_module_diagnostic_no_diag_for_inline_mode() { + let (analysis, file_id) = single_file("mod foo {}"); + let diagnostics = analysis.diagnostics(file_id).unwrap(); + assert_eq_dbg(r#"[]"#, &diagnostics); +} + +#[test] +fn test_resolve_parent_module() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo; + //- /foo.rs + <|>// empty + ", + ); + let symbols = analysis.parent_module(pos).unwrap(); + assert_eq_dbg( + r#"[NavigationTarget { file_id: FileId(1), name: "foo", kind: MODULE, range: [4; 7), ptr: None }]"#, + &symbols, + ); +} + +#[test] +fn test_resolve_parent_module_for_inline() { + let (analysis, pos) = analysis_and_position( + " + //- /lib.rs + mod foo { + mod bar { + mod baz { <|> } + } + } + ", + ); + let symbols = analysis.parent_module(pos).unwrap(); + assert_eq_dbg( + r#"[NavigationTarget { file_id: FileId(1), name: "baz", kind: MODULE, range: [36; 39), ptr: None }]"#, + &symbols, + ); +} + +#[test] +fn test_resolve_crate_root() { + let mock = MockAnalysis::with_files( + " + //- /bar.rs + mod foo; + //- /bar/foo.rs + // emtpy <|> + ", + ); + let root_file = mock.id_of("/bar.rs"); + let mod_file = mock.id_of("/bar/foo.rs"); + let mut host = mock.analysis_host(); + assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); + + let mut crate_graph = CrateGraph::default(); + let crate_id = crate_graph.add_crate_root(root_file); + let mut change = AnalysisChange::new(); + change.set_crate_graph(crate_graph); + host.apply_change(change); + + assert_eq!(host.analysis().crate_for(mod_file).unwrap(), vec![crate_id]); +} + +fn get_all_refs(text: &str) -> Vec<(FileId, TextRange)> { + let (analysis, position) = single_file_with_position(text); + analysis.find_all_refs(position).unwrap() +} + +#[test] +fn test_find_all_refs_for_local() { + let code = r#" + fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { + i = 0; + } + + i = 5; + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 5); +} + 
+#[test] +fn test_find_all_refs_for_param_inside() { + let code = r#" + fn foo(i : u32) -> u32 { + i<|> + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 2); +} + +#[test] +fn test_find_all_refs_for_fn_param() { + let code = r#" + fn foo(i<|> : u32) -> u32 { + i + }"#; + + let refs = get_all_refs(code); + assert_eq!(refs.len(), 2); +} +#[test] +fn test_rename_for_local() { + test_rename( + r#" + fn main() { + let mut i = 1; + let j = 1; + i = i<|> + j; + + { + i = 0; + } + + i = 5; + }"#, + "k", + r#" + fn main() { + let mut k = 1; + let j = 1; + k = k + j; + + { + k = 0; + } + + k = 5; + }"#, + ); +} + +#[test] +fn test_rename_for_param_inside() { + test_rename( + r#" + fn foo(i : u32) -> u32 { + i<|> + }"#, + "j", + r#" + fn foo(j : u32) -> u32 { + j + }"#, + ); +} + +#[test] +fn test_rename_refs_for_fn_param() { + test_rename( + r#" + fn foo(i<|> : u32) -> u32 { + i + }"#, + "new_name", + r#" + fn foo(new_name : u32) -> u32 { + new_name + }"#, + ); +} + +#[test] +fn test_rename_for_mut_param() { + test_rename( + r#" + fn foo(mut i<|> : u32) -> u32 { + i + }"#, + "new_name", + r#" + fn foo(mut new_name : u32) -> u32 { + new_name + }"#, + ); +} + +fn test_rename(text: &str, new_name: &str, expected: &str) { + let (analysis, position) = single_file_with_position(text); + let edits = analysis.rename(position, new_name).unwrap(); + let mut text_edit_bulder = ra_text_edit::TextEditBuilder::default(); + let mut file_id: Option = None; + for edit in edits { + file_id = Some(edit.file_id); + for atom in edit.edit.as_atoms() { + text_edit_bulder.replace(atom.delete, atom.insert.clone()); + } + } + let result = text_edit_bulder + .finish() + .apply(&*analysis.file_text(file_id.unwrap())); + assert_eq_text!(expected, &*result); +} + +#[test] +fn world_symbols_include_stuff_from_macros() { + let (analysis, _) = single_file( + " +salsa::query_group! 
{ +pub trait HirDatabase: SyntaxDatabase {} +} + ", + ); + + let mut symbols = analysis.symbol_search(Query::new("Hir".into())).unwrap(); + let s = symbols.pop().unwrap(); + assert_eq!(s.name(), "HirDatabase"); + assert_eq!(s.range(), TextRange::from_to(33.into(), 44.into())); +} diff --git a/crates/ra_ide_api/tests/test/runnables.rs b/crates/ra_ide_api/tests/test/runnables.rs new file mode 100644 index 000000000..da8d5e0d5 --- /dev/null +++ b/crates/ra_ide_api/tests/test/runnables.rs @@ -0,0 +1,109 @@ +use test_utils::assert_eq_dbg; + +use ra_ide_api::mock_analysis::analysis_and_position; + +#[test] +fn test_runnables() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + fn main() {} + + #[test] + fn test_foo() {} + + #[test] + #[ignore] + fn test_foo() {} + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_eq_dbg( + r#"[Runnable { range: [1; 21), kind: Bin }, + Runnable { range: [22; 46), kind: Test { name: "test_foo" } }, + Runnable { range: [47; 81), kind: Test { name: "test_foo" } }]"#, + &runnables, + ) +} + +#[test] +fn test_runnables_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod test_mod { + #[test] + fn test_foo1() {} + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_eq_dbg( + r#"[Runnable { range: [1; 59), kind: TestMod { path: "test_mod" } }, + Runnable { range: [28; 57), kind: Test { name: "test_foo1" } }]"#, + &runnables, + ) +} + +#[test] +fn test_runnables_one_depth_layer_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod foo { + mod test_mod { + #[test] + fn test_foo1() {} + } + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_eq_dbg( + r#"[Runnable { range: [23; 85), kind: TestMod { path: "foo::test_mod" } }, + Runnable { range: [46; 79), kind: Test { name: "test_foo1" } }]"#, + &runnables, + ) +} + +#[test] +fn 
test_runnables_multiple_depth_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod foo { + mod bar { + mod test_mod { + #[test] + fn test_foo1() {} + } + } + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_eq_dbg( + r#"[Runnable { range: [41; 115), kind: TestMod { path: "foo::bar::test_mod" } }, + Runnable { range: [68; 105), kind: Test { name: "test_foo1" } }]"#, + &runnables, + ) +} + +#[test] +fn test_runnables_no_test_function_in_module() { + let (analysis, pos) = analysis_and_position( + r#" + //- /lib.rs + <|> //empty + mod test_mod { + fn foo1() {} + } + "#, + ); + let runnables = analysis.runnables(pos.file_id).unwrap(); + assert_eq_dbg(r#"[]"#, &runnables) +} diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index b9fd61105..296fae34f 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml @@ -29,7 +29,7 @@ parking_lot = "0.7.0" thread_worker = { path = "../thread_worker" } ra_syntax = { path = "../ra_syntax" } ra_text_edit = { path = "../ra_text_edit" } -ra_analysis = { path = "../ra_analysis" } +ra_ide_api = { path = "../ra_ide_api" } gen_lsp_server = { path = "../gen_lsp_server" } ra_vfs = { path = "../ra_vfs" } diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs index b3f8c83cc..5c8b3c194 100644 --- a/crates/ra_lsp_server/src/conv.rs +++ b/crates/ra_lsp_server/src/conv.rs @@ -4,7 +4,7 @@ use languageserver_types::{ TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit, }; -use ra_analysis::{ +use ra_ide_api::{ CompletionItem, CompletionItemKind, FileId, FilePosition, FileRange, FileSystemEdit, InsertText, NavigationTarget, SourceChange, SourceFileEdit, LineCol, LineIndex, translate_offset_with_edit diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index 2dc1be26a..96923fac7 100644 --- 
a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs @@ -10,7 +10,7 @@ use gen_lsp_server::{ handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse, }; use languageserver_types::NumberOrString; -use ra_analysis::{Canceled, FileId, LibraryData}; +use ra_ide_api::{Canceled, FileId, LibraryData}; use ra_vfs::VfsTask; use rayon; use rustc_hash::FxHashSet; diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs index b7777bfc3..a653c5ada 100644 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ b/crates/ra_lsp_server/src/main_loop/handlers.rs @@ -8,7 +8,7 @@ use languageserver_types::{ ParameterInformation, ParameterLabel, Position, PrepareRenameResponse, Range, RenameParams, SignatureInformation, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit, }; -use ra_analysis::{ +use ra_ide_api::{ FileId, FilePosition, FileRange, FoldKind, Query, RunnableKind, Severity, SourceChange, }; use ra_syntax::{TextUnit, AstNode}; @@ -736,7 +736,7 @@ fn highlight(world: &ServerWorld, file_id: FileId) -> Result> { } fn to_diagnostic_severity(severity: Severity) -> DiagnosticSeverity { - use ra_analysis::Severity::*; + use ra_ide_api::Severity::*; match severity { Error => DiagnosticSeverity::Error, diff --git a/crates/ra_lsp_server/src/main_loop/subscriptions.rs b/crates/ra_lsp_server/src/main_loop/subscriptions.rs index 03f41e870..a83e01557 100644 --- a/crates/ra_lsp_server/src/main_loop/subscriptions.rs +++ b/crates/ra_lsp_server/src/main_loop/subscriptions.rs @@ -1,4 +1,4 @@ -use ra_analysis::FileId; +use ra_ide_api::FileId; use rustc_hash::FxHashSet; pub struct Subscriptions { diff --git a/crates/ra_lsp_server/src/server_world.rs b/crates/ra_lsp_server/src/server_world.rs index ebf2b15cc..76c76766d 100644 --- a/crates/ra_lsp_server/src/server_world.rs +++ b/crates/ra_lsp_server/src/server_world.rs @@ -1,10 +1,10 @@ use std::{ - path::{PathBuf}, + 
path::PathBuf, sync::Arc, }; use languageserver_types::Url; -use ra_analysis::{ +use ra_ide_api::{ Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, SourceRootId }; @@ -12,7 +12,7 @@ use ra_vfs::{Vfs, VfsChange, VfsFile, VfsRoot}; use rustc_hash::FxHashMap; use relative_path::RelativePathBuf; use parking_lot::RwLock; -use failure::{format_err}; +use failure::format_err; use crate::{ project_model::{CargoWorkspace, TargetKind}, -- cgit v1.2.3 From 0c62b1bb7a49bf527780ce1f8cade5eb4fbfdb2d Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 8 Jan 2019 22:45:52 +0300 Subject: fix the docs --- crates/ra_db/src/lib.rs | 2 +- crates/ra_ide_api/src/lib.rs | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) (limited to 'crates') diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs index f56f70983..e680d9fc3 100644 --- a/crates/ra_db/src/lib.rs +++ b/crates/ra_db/src/lib.rs @@ -1,4 +1,4 @@ -//! ra_db defines basic database traits. Concrete DB is defined by ra_analysis. +//! ra_db defines basic database traits. Concrete DB is defined by ra_ide_api. mod cancelation; mod syntax_ptr; mod input; diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index 183e36706..7e9ca2034 100644 --- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs @@ -1,8 +1,14 @@ -//! ra_analyzer crate provides "ide-centric" APIs for the rust-analyzer. What -//! powers this API are the `RootDatabase` struct, which defines a `salsa` +//! ra_ide_api crate provides "ide-centric" APIs for the rust-analyzer. That is, +//! it generally operates with files and text ranges, and returns results as +//! Strings, suitable for displaying to the human. +//! +//! What powers this API are the `RootDatabase` struct, which defines a `salsa` //! database, and the `ra_hir` crate, where majority of the analysis happens. //! However, IDE specific bits of the analysis (most notably completion) happen //! in this crate. +//! +//! 
The sibling `ra_ide_api_light` handles those bits of IDE functionality +//! which are restricted to a single file and need only syntax. macro_rules! ctry { ($expr:expr) => { match $expr { -- cgit v1.2.3