Diffstat (limited to 'crates')
74 files changed, 1674 insertions, 679 deletions
diff --git a/crates/assists/Cargo.toml b/crates/assists/Cargo.toml
index 108f656e9..3fd8327d6 100644
--- a/crates/assists/Cargo.toml
+++ b/crates/assists/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false | |||
12 | [dependencies] | 12 | [dependencies] |
13 | rustc-hash = "1.1.0" | 13 | rustc-hash = "1.1.0" |
14 | itertools = "0.9.0" | 14 | itertools = "0.9.0" |
15 | either = "1.5.3" | 15 | either = "1.6.1" |
16 | 16 | ||
17 | stdx = { path = "../stdx", version = "0.0.0" } | 17 | stdx = { path = "../stdx", version = "0.0.0" } |
18 | syntax = { path = "../syntax", version = "0.0.0" } | 18 | syntax = { path = "../syntax", version = "0.0.0" } |
diff --git a/crates/assists/src/assist_config.rs b/crates/assists/src/assist_config.rs
index b24527ec4..786224cfa 100644
--- a/crates/assists/src/assist_config.rs
+++ b/crates/assists/src/assist_config.rs
@@ -5,8 +5,9 @@ | |||
5 | //! assists if we are allowed to. | 5 | //! assists if we are allowed to. |
6 | 6 | ||
7 | use hir::PrefixKind; | 7 | use hir::PrefixKind; |
8 | use ide_db::helpers::insert_use::MergeBehaviour; | ||
8 | 9 | ||
9 | use crate::{utils::MergeBehaviour, AssistKind}; | 10 | use crate::AssistKind; |
10 | 11 | ||
11 | #[derive(Clone, Debug, PartialEq, Eq)] | 12 | #[derive(Clone, Debug, PartialEq, Eq)] |
12 | pub struct AssistConfig { | 13 | pub struct AssistConfig { |
diff --git a/crates/assists/src/ast_transform.rs b/crates/assists/src/ast_transform.rs
index ac72f3f02..66e4634b1 100644
--- a/crates/assists/src/ast_transform.rs
+++ b/crates/assists/src/ast_transform.rs
@@ -1,5 +1,6 @@ | |||
1 | //! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined. | 1 | //! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined. |
2 | use hir::{HirDisplay, PathResolution, SemanticsScope}; | 2 | use hir::{HirDisplay, PathResolution, SemanticsScope}; |
3 | use ide_db::helpers::mod_path_to_ast; | ||
3 | use rustc_hash::FxHashMap; | 4 | use rustc_hash::FxHashMap; |
4 | use syntax::{ | 5 | use syntax::{ |
5 | algo::SyntaxRewriter, | 6 | algo::SyntaxRewriter, |
@@ -7,8 +8,6 @@ use syntax::{ | |||
7 | SyntaxNode, | 8 | SyntaxNode, |
8 | }; | 9 | }; |
9 | 10 | ||
10 | use crate::utils::mod_path_to_ast; | ||
11 | |||
12 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { | 11 | pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N { |
13 | SyntaxRewriter::from_fn(|element| match element { | 12 | SyntaxRewriter::from_fn(|element| match element { |
14 | syntax::SyntaxElement::Node(n) => { | 13 | syntax::SyntaxElement::Node(n) => { |
diff --git a/crates/assists/src/handlers/auto_import.rs b/crates/assists/src/handlers/auto_import.rs
index d665837a2..bd5bba646 100644
--- a/crates/assists/src/handlers/auto_import.rs
+++ b/crates/assists/src/handlers/auto_import.rs
@@ -1,8 +1,11 @@ | |||
1 | use ide_db::helpers::{ | ||
2 | insert_use::{insert_use, ImportScope}, | ||
3 | mod_path_to_ast, | ||
4 | }; | ||
1 | use syntax::ast; | 5 | use syntax::ast; |
2 | 6 | ||
3 | use crate::{ | 7 | use crate::{ |
4 | utils::import_assets::{ImportAssets, ImportCandidate}, | 8 | utils::import_assets::{ImportAssets, ImportCandidate}, |
5 | utils::{insert_use, mod_path_to_ast, ImportScope}, | ||
6 | AssistContext, AssistId, AssistKind, Assists, GroupLabel, | 9 | AssistContext, AssistId, AssistKind, Assists, GroupLabel, |
7 | }; | 10 | }; |
8 | 11 | ||
diff --git a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
index cac77c49b..d85767b4e 100644
--- a/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -2,6 +2,10 @@ use std::iter; | |||
2 | 2 | ||
3 | use either::Either; | 3 | use either::Either; |
4 | use hir::{AsName, EnumVariant, Module, ModuleDef, Name}; | 4 | use hir::{AsName, EnumVariant, Module, ModuleDef, Name}; |
5 | use ide_db::helpers::{ | ||
6 | insert_use::{insert_use, ImportScope}, | ||
7 | mod_path_to_ast, | ||
8 | }; | ||
5 | use ide_db::{defs::Definition, search::Reference, RootDatabase}; | 9 | use ide_db::{defs::Definition, search::Reference, RootDatabase}; |
6 | use rustc_hash::{FxHashMap, FxHashSet}; | 10 | use rustc_hash::{FxHashMap, FxHashSet}; |
7 | use syntax::{ | 11 | use syntax::{ |
@@ -10,10 +14,7 @@ use syntax::{ | |||
10 | SourceFile, SyntaxElement, SyntaxNode, T, | 14 | SourceFile, SyntaxElement, SyntaxNode, T, |
11 | }; | 15 | }; |
12 | 16 | ||
13 | use crate::{ | 17 | use crate::{AssistContext, AssistId, AssistKind, Assists}; |
14 | utils::{insert_use, mod_path_to_ast, ImportScope}, | ||
15 | AssistContext, AssistId, AssistKind, Assists, | ||
16 | }; | ||
17 | 18 | ||
18 | // Assist: extract_struct_from_enum_variant | 19 | // Assist: extract_struct_from_enum_variant |
19 | // | 20 | // |
@@ -236,10 +237,9 @@ fn update_reference( | |||
236 | 237 | ||
237 | #[cfg(test)] | 238 | #[cfg(test)] |
238 | mod tests { | 239 | mod tests { |
239 | use crate::{ | 240 | use ide_db::helpers::FamousDefs; |
240 | tests::{check_assist, check_assist_not_applicable}, | 241 | |
241 | utils::FamousDefs, | 242 | use crate::tests::{check_assist, check_assist_not_applicable}; |
242 | }; | ||
243 | 243 | ||
244 | use super::*; | 244 | use super::*; |
245 | 245 | ||
diff --git a/crates/assists/src/handlers/fill_match_arms.rs b/crates/assists/src/handlers/fill_match_arms.rs
index eda45f5b3..ef12ef0cf 100644
--- a/crates/assists/src/handlers/fill_match_arms.rs
+++ b/crates/assists/src/handlers/fill_match_arms.rs
@@ -1,13 +1,14 @@ | |||
1 | use std::iter; | 1 | use std::iter; |
2 | 2 | ||
3 | use hir::{Adt, HasSource, ModuleDef, Semantics}; | 3 | use hir::{Adt, HasSource, ModuleDef, Semantics}; |
4 | use ide_db::helpers::{mod_path_to_ast, FamousDefs}; | ||
4 | use ide_db::RootDatabase; | 5 | use ide_db::RootDatabase; |
5 | use itertools::Itertools; | 6 | use itertools::Itertools; |
6 | use syntax::ast::{self, make, AstNode, MatchArm, NameOwner, Pat}; | 7 | use syntax::ast::{self, make, AstNode, MatchArm, NameOwner, Pat}; |
7 | use test_utils::mark; | 8 | use test_utils::mark; |
8 | 9 | ||
9 | use crate::{ | 10 | use crate::{ |
10 | utils::{mod_path_to_ast, render_snippet, Cursor, FamousDefs}, | 11 | utils::{render_snippet, Cursor}, |
11 | AssistContext, AssistId, AssistKind, Assists, | 12 | AssistContext, AssistId, AssistKind, Assists, |
12 | }; | 13 | }; |
13 | 14 | ||
@@ -212,12 +213,10 @@ fn build_pat(db: &RootDatabase, module: hir::Module, var: hir::EnumVariant) -> O | |||
212 | 213 | ||
213 | #[cfg(test)] | 214 | #[cfg(test)] |
214 | mod tests { | 215 | mod tests { |
216 | use ide_db::helpers::FamousDefs; | ||
215 | use test_utils::mark; | 217 | use test_utils::mark; |
216 | 218 | ||
217 | use crate::{ | 219 | use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; |
218 | tests::{check_assist, check_assist_not_applicable, check_assist_target}, | ||
219 | utils::FamousDefs, | ||
220 | }; | ||
221 | 220 | ||
222 | use super::fill_match_arms; | 221 | use super::fill_match_arms; |
223 | 222 | ||
diff --git a/crates/assists/src/handlers/generate_from_impl_for_enum.rs b/crates/assists/src/handlers/generate_from_impl_for_enum.rs
index 674e5a175..3c374e5d9 100644
--- a/crates/assists/src/handlers/generate_from_impl_for_enum.rs
+++ b/crates/assists/src/handlers/generate_from_impl_for_enum.rs
@@ -1,8 +1,9 @@ | |||
1 | use ide_db::helpers::FamousDefs; | ||
1 | use ide_db::RootDatabase; | 2 | use ide_db::RootDatabase; |
2 | use syntax::ast::{self, AstNode, NameOwner}; | 3 | use syntax::ast::{self, AstNode, NameOwner}; |
3 | use test_utils::mark; | 4 | use test_utils::mark; |
4 | 5 | ||
5 | use crate::{utils::FamousDefs, AssistContext, AssistId, AssistKind, Assists}; | 6 | use crate::{AssistContext, AssistId, AssistKind, Assists}; |
6 | 7 | ||
7 | // Assist: generate_from_impl_for_enum | 8 | // Assist: generate_from_impl_for_enum |
8 | // | 9 | // |
diff --git a/crates/assists/src/handlers/ignore_test.rs b/crates/assists/src/handlers/ignore_test.rs
index d2339184f..5096a0005 100644
--- a/crates/assists/src/handlers/ignore_test.rs
+++ b/crates/assists/src/handlers/ignore_test.rs
@@ -1,4 +1,7 @@ | |||
1 | use syntax::{ast, AstNode}; | 1 | use syntax::{ |
2 | ast::{self, AttrsOwner}, | ||
3 | AstNode, AstToken, | ||
4 | }; | ||
2 | 5 | ||
3 | use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists}; | 6 | use crate::{utils::test_related_attribute, AssistContext, AssistId, AssistKind, Assists}; |
4 | 7 | ||
@@ -25,10 +28,76 @@ pub(crate) fn ignore_test(acc: &mut Assists, ctx: &AssistContext) -> Option<()> | |||
25 | let func = attr.syntax().parent().and_then(ast::Fn::cast)?; | 28 | let func = attr.syntax().parent().and_then(ast::Fn::cast)?; |
26 | let attr = test_related_attribute(&func)?; | 29 | let attr = test_related_attribute(&func)?; |
27 | 30 | ||
28 | acc.add( | 31 | match has_ignore_attribute(&func) { |
29 | AssistId("ignore_test", AssistKind::None), | 32 | None => acc.add( |
30 | "Ignore this test", | 33 | AssistId("ignore_test", AssistKind::None), |
31 | attr.syntax().text_range(), | 34 | "Ignore this test", |
32 | |builder| builder.insert(attr.syntax().text_range().end(), &format!("\n#[ignore]")), | 35 | attr.syntax().text_range(), |
33 | ) | 36 | |builder| builder.insert(attr.syntax().text_range().end(), &format!("\n#[ignore]")), |
37 | ), | ||
38 | Some(ignore_attr) => acc.add( | ||
39 | AssistId("unignore_test", AssistKind::None), | ||
40 | "Re-enable this test", | ||
41 | ignore_attr.syntax().text_range(), | ||
42 | |builder| { | ||
43 | builder.delete(ignore_attr.syntax().text_range()); | ||
44 | let whitespace = ignore_attr | ||
45 | .syntax() | ||
46 | .next_sibling_or_token() | ||
47 | .and_then(|x| x.into_token()) | ||
48 | .and_then(ast::Whitespace::cast); | ||
49 | if let Some(whitespace) = whitespace { | ||
50 | builder.delete(whitespace.syntax().text_range()); | ||
51 | } | ||
52 | }, | ||
53 | ), | ||
54 | } | ||
55 | } | ||
56 | |||
57 | fn has_ignore_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> { | ||
58 | fn_def.attrs().find_map(|attr| { | ||
59 | if attr.path()?.syntax().text() == "ignore" { | ||
60 | Some(attr) | ||
61 | } else { | ||
62 | None | ||
63 | } | ||
64 | }) | ||
65 | } | ||
66 | |||
67 | #[cfg(test)] | ||
68 | mod tests { | ||
69 | use super::ignore_test; | ||
70 | use crate::tests::check_assist; | ||
71 | |||
72 | #[test] | ||
73 | fn test_base_case() { | ||
74 | check_assist( | ||
75 | ignore_test, | ||
76 | r#" | ||
77 | #[test<|>] | ||
78 | fn test() {} | ||
79 | "#, | ||
80 | r#" | ||
81 | #[test] | ||
82 | #[ignore] | ||
83 | fn test() {} | ||
84 | "#, | ||
85 | ) | ||
86 | } | ||
87 | |||
88 | #[test] | ||
89 | fn test_unignore() { | ||
90 | check_assist( | ||
91 | ignore_test, | ||
92 | r#" | ||
93 | #[test<|>] | ||
94 | #[ignore] | ||
95 | fn test() {} | ||
96 | "#, | ||
97 | r#" | ||
98 | #[test] | ||
99 | fn test() {} | ||
100 | "#, | ||
101 | ) | ||
102 | } | ||
34 | } | 103 | } |
diff --git a/crates/assists/src/handlers/infer_function_return_type.rs b/crates/assists/src/handlers/infer_function_return_type.rs
index 520d07ae0..aa584eb03 100644
--- a/crates/assists/src/handlers/infer_function_return_type.rs
+++ b/crates/assists/src/handlers/infer_function_return_type.rs
@@ -17,7 +17,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; | |||
17 | // fn foo() -> i32 { 42i32 } | 17 | // fn foo() -> i32 { 42i32 } |
18 | // ``` | 18 | // ``` |
19 | pub(crate) fn infer_function_return_type(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { | 19 | pub(crate) fn infer_function_return_type(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { |
20 | let (tail_expr, builder_edit_pos, wrap_expr) = extract_tail(ctx)?; | 20 | let (fn_type, tail_expr, builder_edit_pos) = extract_tail(ctx)?; |
21 | let module = ctx.sema.scope(tail_expr.syntax()).module()?; | 21 | let module = ctx.sema.scope(tail_expr.syntax()).module()?; |
22 | let ty = ctx.sema.type_of_expr(&tail_expr)?; | 22 | let ty = ctx.sema.type_of_expr(&tail_expr)?; |
23 | if ty.is_unit() { | 23 | if ty.is_unit() { |
@@ -27,7 +27,10 @@ pub(crate) fn infer_function_return_type(acc: &mut Assists, ctx: &AssistContext) | |||
27 | 27 | ||
28 | acc.add( | 28 | acc.add( |
29 | AssistId("infer_function_return_type", AssistKind::RefactorRewrite), | 29 | AssistId("infer_function_return_type", AssistKind::RefactorRewrite), |
30 | "Add this function's return type", | 30 | match fn_type { |
31 | FnType::Function => "Add this function's return type", | ||
32 | FnType::Closure { .. } => "Add this closure's return type", | ||
33 | }, | ||
31 | tail_expr.syntax().text_range(), | 34 | tail_expr.syntax().text_range(), |
32 | |builder| { | 35 | |builder| { |
33 | match builder_edit_pos { | 36 | match builder_edit_pos { |
@@ -38,7 +41,7 @@ pub(crate) fn infer_function_return_type(acc: &mut Assists, ctx: &AssistContext) | |||
38 | builder.replace(text_range, &format!("-> {}", ty)) | 41 | builder.replace(text_range, &format!("-> {}", ty)) |
39 | } | 42 | } |
40 | } | 43 | } |
41 | if wrap_expr { | 44 | if let FnType::Closure { wrap_expr: true } = fn_type { |
42 | mark::hit!(wrap_closure_non_block_expr); | 45 | mark::hit!(wrap_closure_non_block_expr); |
43 | // `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block | 46 | // `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block |
44 | builder.replace(tail_expr.syntax().text_range(), &format!("{{{}}}", tail_expr)); | 47 | builder.replace(tail_expr.syntax().text_range(), &format!("{{{}}}", tail_expr)); |
@@ -72,8 +75,13 @@ fn ret_ty_to_action(ret_ty: Option<ast::RetType>, insert_pos: TextSize) -> Optio | |||
72 | } | 75 | } |
73 | } | 76 | } |
74 | 77 | ||
75 | fn extract_tail(ctx: &AssistContext) -> Option<(ast::Expr, InsertOrReplace, bool)> { | 78 | enum FnType { |
76 | let (tail_expr, return_type_range, action, wrap_expr) = | 79 | Function, |
80 | Closure { wrap_expr: bool }, | ||
81 | } | ||
82 | |||
83 | fn extract_tail(ctx: &AssistContext) -> Option<(FnType, ast::Expr, InsertOrReplace)> { | ||
84 | let (fn_type, tail_expr, return_type_range, action) = | ||
77 | if let Some(closure) = ctx.find_node_at_offset::<ast::ClosureExpr>() { | 85 | if let Some(closure) = ctx.find_node_at_offset::<ast::ClosureExpr>() { |
78 | let rpipe_pos = closure.param_list()?.syntax().last_token()?.text_range().end(); | 86 | let rpipe_pos = closure.param_list()?.syntax().last_token()?.text_range().end(); |
79 | let action = ret_ty_to_action(closure.ret_type(), rpipe_pos)?; | 87 | let action = ret_ty_to_action(closure.ret_type(), rpipe_pos)?; |
@@ -86,7 +94,7 @@ fn extract_tail(ctx: &AssistContext) -> Option<(ast::Expr, InsertOrReplace, bool | |||
86 | }; | 94 | }; |
87 | 95 | ||
88 | let ret_range = TextRange::new(rpipe_pos, body_start); | 96 | let ret_range = TextRange::new(rpipe_pos, body_start); |
89 | (tail_expr, ret_range, action, wrap_expr) | 97 | (FnType::Closure { wrap_expr }, tail_expr, ret_range, action) |
90 | } else { | 98 | } else { |
91 | let func = ctx.find_node_at_offset::<ast::Fn>()?; | 99 | let func = ctx.find_node_at_offset::<ast::Fn>()?; |
92 | let rparen_pos = func.param_list()?.r_paren_token()?.text_range().end(); | 100 | let rparen_pos = func.param_list()?.r_paren_token()?.text_range().end(); |
@@ -97,7 +105,7 @@ fn extract_tail(ctx: &AssistContext) -> Option<(ast::Expr, InsertOrReplace, bool | |||
97 | 105 | ||
98 | let ret_range_end = body.l_curly_token()?.text_range().start(); | 106 | let ret_range_end = body.l_curly_token()?.text_range().start(); |
99 | let ret_range = TextRange::new(rparen_pos, ret_range_end); | 107 | let ret_range = TextRange::new(rparen_pos, ret_range_end); |
100 | (tail_expr, ret_range, action, false) | 108 | (FnType::Function, tail_expr, ret_range, action) |
101 | }; | 109 | }; |
102 | let frange = ctx.frange.range; | 110 | let frange = ctx.frange.range; |
103 | if return_type_range.contains_range(frange) { | 111 | if return_type_range.contains_range(frange) { |
@@ -109,7 +117,7 @@ fn extract_tail(ctx: &AssistContext) -> Option<(ast::Expr, InsertOrReplace, bool | |||
109 | } else { | 117 | } else { |
110 | return None; | 118 | return None; |
111 | } | 119 | } |
112 | Some((tail_expr, action, wrap_expr)) | 120 | Some((fn_type, tail_expr, action)) |
113 | } | 121 | } |
114 | 122 | ||
115 | #[cfg(test)] | 123 | #[cfg(test)] |
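Aside, not part of the commit: a minimal before/after sketch of the closure branch handled above (the `wrap_closure_non_block_expr` case). When the closure body is a bare expression, inserting `-> T` alone would be invalid, so the assist wraps the body in a block, as the comment in the hunk notes.

fn main() {
    // Before the assist (cursor in the closure's parameter list):
    let before = |x: i32| x + 1;

    // After the assist: the inferred return type is added and the non-block body
    // is wrapped in braces, i.e. the `FnType::Closure { wrap_expr: true }` branch.
    let after = |x: i32| -> i32 { x + 1 };

    assert_eq!(before(2), after(2));
}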
diff --git a/crates/assists/src/handlers/merge_imports.rs b/crates/assists/src/handlers/merge_imports.rs
index fd9c9e03c..b7e853994 100644
--- a/crates/assists/src/handlers/merge_imports.rs
+++ b/crates/assists/src/handlers/merge_imports.rs
@@ -1,3 +1,4 @@ | |||
1 | use ide_db::helpers::insert_use::{try_merge_imports, try_merge_trees, MergeBehaviour}; | ||
1 | use syntax::{ | 2 | use syntax::{ |
2 | algo::{neighbor, SyntaxRewriter}, | 3 | algo::{neighbor, SyntaxRewriter}, |
3 | ast, AstNode, | 4 | ast, AstNode, |
@@ -5,10 +6,7 @@ use syntax::{ | |||
5 | 6 | ||
6 | use crate::{ | 7 | use crate::{ |
7 | assist_context::{AssistContext, Assists}, | 8 | assist_context::{AssistContext, Assists}, |
8 | utils::{ | 9 | utils::next_prev, |
9 | insert_use::{try_merge_imports, try_merge_trees}, | ||
10 | next_prev, MergeBehaviour, | ||
11 | }, | ||
12 | AssistId, AssistKind, | 10 | AssistId, AssistKind, |
13 | }; | 11 | }; |
14 | 12 | ||
diff --git a/crates/assists/src/handlers/qualify_path.rs b/crates/assists/src/handlers/qualify_path.rs
index d5bc4e574..6f9810fe8 100644
--- a/crates/assists/src/handlers/qualify_path.rs
+++ b/crates/assists/src/handlers/qualify_path.rs
@@ -1,6 +1,7 @@ | |||
1 | use std::iter; | 1 | use std::iter; |
2 | 2 | ||
3 | use hir::AsName; | 3 | use hir::AsName; |
4 | use ide_db::helpers::mod_path_to_ast; | ||
4 | use ide_db::RootDatabase; | 5 | use ide_db::RootDatabase; |
5 | use syntax::{ | 6 | use syntax::{ |
6 | ast, | 7 | ast, |
@@ -12,7 +13,6 @@ use test_utils::mark; | |||
12 | use crate::{ | 13 | use crate::{ |
13 | assist_context::{AssistContext, Assists}, | 14 | assist_context::{AssistContext, Assists}, |
14 | utils::import_assets::{ImportAssets, ImportCandidate}, | 15 | utils::import_assets::{ImportAssets, ImportCandidate}, |
15 | utils::mod_path_to_ast, | ||
16 | AssistId, AssistKind, GroupLabel, | 16 | AssistId, AssistKind, GroupLabel, |
17 | }; | 17 | }; |
18 | 18 | ||
diff --git a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
index 453a6cebf..4d6a1956b 100644
--- a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1,3 +1,4 @@ | |||
1 | use ide_db::helpers::mod_path_to_ast; | ||
1 | use ide_db::imports_locator; | 2 | use ide_db::imports_locator; |
2 | use itertools::Itertools; | 3 | use itertools::Itertools; |
3 | use syntax::{ | 4 | use syntax::{ |
@@ -10,8 +11,7 @@ use syntax::{ | |||
10 | use crate::{ | 11 | use crate::{ |
11 | assist_context::{AssistBuilder, AssistContext, Assists}, | 12 | assist_context::{AssistBuilder, AssistContext, Assists}, |
12 | utils::{ | 13 | utils::{ |
13 | add_trait_assoc_items_to_impl, filter_assoc_items, mod_path_to_ast, render_snippet, Cursor, | 14 | add_trait_assoc_items_to_impl, filter_assoc_items, render_snippet, Cursor, DefaultMethods, |
14 | DefaultMethods, | ||
15 | }, | 15 | }, |
16 | AssistId, AssistKind, | 16 | AssistId, AssistKind, |
17 | }; | 17 | }; |
diff --git a/crates/assists/src/handlers/replace_qualified_name_with_use.rs b/crates/assists/src/handlers/replace_qualified_name_with_use.rs
index a66db9ae3..8bdf9eea5 100644
--- a/crates/assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,10 +1,8 @@ | |||
1 | use ide_db::helpers::insert_use::{insert_use, ImportScope}; | ||
1 | use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SyntaxNode}; | 2 | use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SyntaxNode}; |
2 | use test_utils::mark; | 3 | use test_utils::mark; |
3 | 4 | ||
4 | use crate::{ | 5 | use crate::{AssistContext, AssistId, AssistKind, Assists}; |
5 | utils::{insert_use, ImportScope}, | ||
6 | AssistContext, AssistId, AssistKind, Assists, | ||
7 | }; | ||
8 | 6 | ||
9 | // Assist: replace_qualified_name_with_use | 7 | // Assist: replace_qualified_name_with_use |
10 | // | 8 | // |
diff --git a/crates/assists/src/utils.rs b/crates/assists/src/utils.rs
index 66c0cdd5f..01f5c291f 100644
--- a/crates/assists/src/utils.rs
+++ b/crates/assists/src/utils.rs
@@ -1,10 +1,9 @@ | |||
1 | //! Assorted functions shared by several assists. | 1 | //! Assorted functions shared by several assists. |
2 | pub(crate) mod insert_use; | ||
3 | pub(crate) mod import_assets; | 2 | pub(crate) mod import_assets; |
4 | 3 | ||
5 | use std::ops; | 4 | use std::ops; |
6 | 5 | ||
7 | use hir::{Crate, Enum, HasSource, Module, ScopeDef, Semantics, Trait}; | 6 | use hir::HasSource; |
8 | use ide_db::RootDatabase; | 7 | use ide_db::RootDatabase; |
9 | use itertools::Itertools; | 8 | use itertools::Itertools; |
10 | use syntax::{ | 9 | use syntax::{ |
@@ -22,29 +21,6 @@ use crate::{ | |||
22 | ast_transform::{self, AstTransform, QualifyPaths, SubstituteTypeParams}, | 21 | ast_transform::{self, AstTransform, QualifyPaths, SubstituteTypeParams}, |
23 | }; | 22 | }; |
24 | 23 | ||
25 | pub use insert_use::{insert_use, ImportScope, MergeBehaviour}; | ||
26 | |||
27 | pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { | ||
28 | let mut segments = Vec::new(); | ||
29 | let mut is_abs = false; | ||
30 | match path.kind { | ||
31 | hir::PathKind::Plain => {} | ||
32 | hir::PathKind::Super(0) => segments.push(make::path_segment_self()), | ||
33 | hir::PathKind::Super(n) => segments.extend((0..n).map(|_| make::path_segment_super())), | ||
34 | hir::PathKind::DollarCrate(_) | hir::PathKind::Crate => { | ||
35 | segments.push(make::path_segment_crate()) | ||
36 | } | ||
37 | hir::PathKind::Abs => is_abs = true, | ||
38 | } | ||
39 | |||
40 | segments.extend( | ||
41 | path.segments | ||
42 | .iter() | ||
43 | .map(|segment| make::path_segment(make::name_ref(&segment.to_string()))), | ||
44 | ); | ||
45 | make::path_from_segments(segments, is_abs) | ||
46 | } | ||
47 | |||
48 | pub(crate) fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr { | 24 | pub(crate) fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr { |
49 | extract_trivial_expression(&block) | 25 | extract_trivial_expression(&block) |
50 | .filter(|expr| !expr.syntax().text().contains_char('\n')) | 26 | .filter(|expr| !expr.syntax().text().contains_char('\n')) |
@@ -259,179 +235,6 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> { | |||
259 | } | 235 | } |
260 | } | 236 | } |
261 | 237 | ||
262 | /// Helps with finding well-know things inside the standard library. This is | ||
263 | /// somewhat similar to the known paths infra inside hir, but it different; We | ||
264 | /// want to make sure that IDE specific paths don't become interesting inside | ||
265 | /// the compiler itself as well. | ||
266 | pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Option<Crate>); | ||
267 | |||
268 | #[allow(non_snake_case)] | ||
269 | impl FamousDefs<'_, '_> { | ||
270 | pub const FIXTURE: &'static str = r#"//- /libcore.rs crate:core | ||
271 | pub mod convert { | ||
272 | pub trait From<T> { | ||
273 | fn from(t: T) -> Self; | ||
274 | } | ||
275 | } | ||
276 | |||
277 | pub mod default { | ||
278 | pub trait Default { | ||
279 | fn default() -> Self; | ||
280 | } | ||
281 | } | ||
282 | |||
283 | pub mod iter { | ||
284 | pub use self::traits::{collect::IntoIterator, iterator::Iterator}; | ||
285 | mod traits { | ||
286 | pub(crate) mod iterator { | ||
287 | use crate::option::Option; | ||
288 | pub trait Iterator { | ||
289 | type Item; | ||
290 | fn next(&mut self) -> Option<Self::Item>; | ||
291 | fn by_ref(&mut self) -> &mut Self { | ||
292 | self | ||
293 | } | ||
294 | fn take(self, n: usize) -> crate::iter::Take<Self> { | ||
295 | crate::iter::Take { inner: self } | ||
296 | } | ||
297 | } | ||
298 | |||
299 | impl<I: Iterator> Iterator for &mut I { | ||
300 | type Item = I::Item; | ||
301 | fn next(&mut self) -> Option<I::Item> { | ||
302 | (**self).next() | ||
303 | } | ||
304 | } | ||
305 | } | ||
306 | pub(crate) mod collect { | ||
307 | pub trait IntoIterator { | ||
308 | type Item; | ||
309 | } | ||
310 | } | ||
311 | } | ||
312 | |||
313 | pub use self::sources::*; | ||
314 | pub(crate) mod sources { | ||
315 | use super::Iterator; | ||
316 | use crate::option::Option::{self, *}; | ||
317 | pub struct Repeat<A> { | ||
318 | element: A, | ||
319 | } | ||
320 | |||
321 | pub fn repeat<T>(elt: T) -> Repeat<T> { | ||
322 | Repeat { element: elt } | ||
323 | } | ||
324 | |||
325 | impl<A> Iterator for Repeat<A> { | ||
326 | type Item = A; | ||
327 | |||
328 | fn next(&mut self) -> Option<A> { | ||
329 | None | ||
330 | } | ||
331 | } | ||
332 | } | ||
333 | |||
334 | pub use self::adapters::*; | ||
335 | pub(crate) mod adapters { | ||
336 | use super::Iterator; | ||
337 | use crate::option::Option::{self, *}; | ||
338 | pub struct Take<I> { pub(crate) inner: I } | ||
339 | impl<I> Iterator for Take<I> where I: Iterator { | ||
340 | type Item = <I as Iterator>::Item; | ||
341 | fn next(&mut self) -> Option<<I as Iterator>::Item> { | ||
342 | None | ||
343 | } | ||
344 | } | ||
345 | } | ||
346 | } | ||
347 | |||
348 | pub mod option { | ||
349 | pub enum Option<T> { None, Some(T)} | ||
350 | } | ||
351 | |||
352 | pub mod prelude { | ||
353 | pub use crate::{convert::From, iter::{IntoIterator, Iterator}, option::Option::{self, *}, default::Default}; | ||
354 | } | ||
355 | #[prelude_import] | ||
356 | pub use prelude::*; | ||
357 | "#; | ||
358 | |||
359 | pub fn core(&self) -> Option<Crate> { | ||
360 | self.find_crate("core") | ||
361 | } | ||
362 | |||
363 | pub(crate) fn core_convert_From(&self) -> Option<Trait> { | ||
364 | self.find_trait("core:convert:From") | ||
365 | } | ||
366 | |||
367 | pub(crate) fn core_option_Option(&self) -> Option<Enum> { | ||
368 | self.find_enum("core:option:Option") | ||
369 | } | ||
370 | |||
371 | pub fn core_default_Default(&self) -> Option<Trait> { | ||
372 | self.find_trait("core:default:Default") | ||
373 | } | ||
374 | |||
375 | pub fn core_iter_Iterator(&self) -> Option<Trait> { | ||
376 | self.find_trait("core:iter:traits:iterator:Iterator") | ||
377 | } | ||
378 | |||
379 | pub fn core_iter(&self) -> Option<Module> { | ||
380 | self.find_module("core:iter") | ||
381 | } | ||
382 | |||
383 | fn find_trait(&self, path: &str) -> Option<Trait> { | ||
384 | match self.find_def(path)? { | ||
385 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it), | ||
386 | _ => None, | ||
387 | } | ||
388 | } | ||
389 | |||
390 | fn find_enum(&self, path: &str) -> Option<Enum> { | ||
391 | match self.find_def(path)? { | ||
392 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(it))) => Some(it), | ||
393 | _ => None, | ||
394 | } | ||
395 | } | ||
396 | |||
397 | fn find_module(&self, path: &str) -> Option<Module> { | ||
398 | match self.find_def(path)? { | ||
399 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(it)) => Some(it), | ||
400 | _ => None, | ||
401 | } | ||
402 | } | ||
403 | |||
404 | fn find_crate(&self, name: &str) -> Option<Crate> { | ||
405 | let krate = self.1?; | ||
406 | let db = self.0.db; | ||
407 | let res = | ||
408 | krate.dependencies(db).into_iter().find(|dep| dep.name.to_string() == name)?.krate; | ||
409 | Some(res) | ||
410 | } | ||
411 | |||
412 | fn find_def(&self, path: &str) -> Option<ScopeDef> { | ||
413 | let db = self.0.db; | ||
414 | let mut path = path.split(':'); | ||
415 | let trait_ = path.next_back()?; | ||
416 | let std_crate = path.next()?; | ||
417 | let std_crate = self.find_crate(std_crate)?; | ||
418 | let mut module = std_crate.root_module(db); | ||
419 | for segment in path { | ||
420 | module = module.children(db).find_map(|child| { | ||
421 | let name = child.name(db)?; | ||
422 | if name.to_string() == segment { | ||
423 | Some(child) | ||
424 | } else { | ||
425 | None | ||
426 | } | ||
427 | })?; | ||
428 | } | ||
429 | let def = | ||
430 | module.scope(db, None).into_iter().find(|(name, _def)| name.to_string() == trait_)?.1; | ||
431 | Some(def) | ||
432 | } | ||
433 | } | ||
434 | |||
435 | pub(crate) fn next_prev() -> impl Iterator<Item = Direction> { | 238 | pub(crate) fn next_prev() -> impl Iterator<Item = Direction> { |
436 | [Direction::Next, Direction::Prev].iter().copied() | 239 | [Direction::Next, Direction::Prev].iter().copied() |
437 | } | 240 | } |
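Aside, not from the commit: the helpers deleted here (`mod_path_to_ast`, `FamousDefs`, and the `insert_use` module) move to `ide_db::helpers`, which is why the `use` paths change throughout this diff. A rough sketch of calling them at their new location, modelled on the `item.rs` hunk further down; the `import_edit` wrapper name is invented for this note, and the signatures are only as shown in the moved code.

use hir::ModPath;
use ide_db::helpers::{
    insert_use::{insert_use, ImportScope, MergeBehaviour},
    mod_path_to_ast,
};
use syntax::algo;
use text_edit::TextEdit;

// Hypothetical wrapper: turn a resolved module path into a `use`-insertion edit,
// mirroring what `Builder::build` in crates/completion/src/item.rs does below.
fn import_edit(
    scope: &ImportScope,
    path: &ModPath,
    merge: Option<MergeBehaviour>,
) -> Option<TextEdit> {
    let rewriter = insert_use(scope, mod_path_to_ast(path), merge);
    let old_ast = rewriter.rewrite_root()?;
    let mut edit = TextEdit::builder();
    algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut edit);
    Some(edit.finish())
}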
diff --git a/crates/completion/Cargo.toml b/crates/completion/Cargo.toml
index e7df9d955..35e169a28 100644
--- a/crates/completion/Cargo.toml
+++ b/crates/completion/Cargo.toml
@@ -15,7 +15,6 @@ log = "0.4.8" | |||
15 | rustc-hash = "1.1.0" | 15 | rustc-hash = "1.1.0" |
16 | either = "1.6.1" | 16 | either = "1.6.1" |
17 | 17 | ||
18 | assists = { path = "../assists", version = "0.0.0" } | ||
19 | stdx = { path = "../stdx", version = "0.0.0" } | 18 | stdx = { path = "../stdx", version = "0.0.0" } |
20 | syntax = { path = "../syntax", version = "0.0.0" } | 19 | syntax = { path = "../syntax", version = "0.0.0" } |
21 | text_edit = { path = "../text_edit", version = "0.0.0" } | 20 | text_edit = { path = "../text_edit", version = "0.0.0" } |
diff --git a/crates/completion/src/completions/pattern.rs b/crates/completion/src/completions/pattern.rs
index 7ab7f09fe..4f63ff0ef 100644
--- a/crates/completion/src/completions/pattern.rs
+++ b/crates/completion/src/completions/pattern.rs
@@ -4,7 +4,7 @@ use crate::{CompletionContext, Completions}; | |||
4 | 4 | ||
5 | /// Completes constats and paths in patterns. | 5 | /// Completes constats and paths in patterns. |
6 | pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | 6 | pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { |
7 | if !ctx.is_pat_binding_or_const { | 7 | if !(ctx.is_pat_binding_or_const || ctx.is_irrefutable_let_pat_binding) { |
8 | return; | 8 | return; |
9 | } | 9 | } |
10 | if ctx.record_pat_syntax.is_some() { | 10 | if ctx.record_pat_syntax.is_some() { |
@@ -14,20 +14,27 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | |||
14 | // FIXME: ideally, we should look at the type we are matching against and | 14 | // FIXME: ideally, we should look at the type we are matching against and |
15 | // suggest variants + auto-imports | 15 | // suggest variants + auto-imports |
16 | ctx.scope.process_all_names(&mut |name, res| { | 16 | ctx.scope.process_all_names(&mut |name, res| { |
17 | match &res { | 17 | let add_resolution = match &res { |
18 | hir::ScopeDef::ModuleDef(def) => match def { | 18 | hir::ScopeDef::ModuleDef(def) => { |
19 | hir::ModuleDef::Adt(hir::Adt::Enum(..)) | 19 | if ctx.is_irrefutable_let_pat_binding { |
20 | | hir::ModuleDef::Adt(hir::Adt::Struct(..)) | 20 | matches!(def, hir::ModuleDef::Adt(hir::Adt::Struct(_))) |
21 | | hir::ModuleDef::EnumVariant(..) | 21 | } else { |
22 | | hir::ModuleDef::Const(..) | 22 | matches!( |
23 | | hir::ModuleDef::Module(..) => (), | 23 | def, |
24 | _ => return, | 24 | hir::ModuleDef::Adt(hir::Adt::Enum(..)) |
25 | }, | 25 | | hir::ModuleDef::Adt(hir::Adt::Struct(..)) |
26 | hir::ScopeDef::MacroDef(_) => (), | 26 | | hir::ModuleDef::EnumVariant(..) |
27 | _ => return, | 27 | | hir::ModuleDef::Const(..) |
28 | | hir::ModuleDef::Module(..) | ||
29 | ) | ||
30 | } | ||
31 | } | ||
32 | hir::ScopeDef::MacroDef(_) => true, | ||
33 | _ => false, | ||
28 | }; | 34 | }; |
29 | 35 | if add_resolution { | |
30 | acc.add_resolution(ctx, name.to_string(), &res) | 36 | acc.add_resolution(ctx, name.to_string(), &res); |
37 | } | ||
31 | }); | 38 | }); |
32 | } | 39 | } |
33 | 40 | ||
@@ -85,4 +92,26 @@ fn foo() { | |||
85 | "#]], | 92 | "#]], |
86 | ); | 93 | ); |
87 | } | 94 | } |
95 | |||
96 | #[test] | ||
97 | fn completes_in_irrefutable_let() { | ||
98 | check( | ||
99 | r#" | ||
100 | enum E { X } | ||
101 | use self::E::X; | ||
102 | const Z: E = E::X; | ||
103 | mod m {} | ||
104 | |||
105 | static FOO: E = E::X; | ||
106 | struct Bar { f: u32 } | ||
107 | |||
108 | fn foo() { | ||
109 | let <|> | ||
110 | } | ||
111 | "#, | ||
112 | expect![[r#" | ||
113 | st Bar | ||
114 | "#]], | ||
115 | ); | ||
116 | } | ||
88 | } | 117 | } |
diff --git a/crates/completion/src/completions/record.rs b/crates/completion/src/completions/record.rs
index 2049b9d09..eaa44c97d 100644
--- a/crates/completion/src/completions/record.rs
+++ b/crates/completion/src/completions/record.rs
@@ -1,5 +1,5 @@ | |||
1 | //! Complete fields in record literals and patterns. | 1 | //! Complete fields in record literals and patterns. |
2 | use assists::utils::FamousDefs; | 2 | use ide_db::helpers::FamousDefs; |
3 | use syntax::ast::Expr; | 3 | use syntax::ast::Expr; |
4 | 4 | ||
5 | use crate::{ | 5 | use crate::{ |
@@ -45,8 +45,8 @@ pub(crate) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> | |||
45 | 45 | ||
46 | #[cfg(test)] | 46 | #[cfg(test)] |
47 | mod tests { | 47 | mod tests { |
48 | use assists::utils::FamousDefs; | ||
49 | use expect_test::{expect, Expect}; | 48 | use expect_test::{expect, Expect}; |
49 | use ide_db::helpers::FamousDefs; | ||
50 | 50 | ||
51 | use crate::{test_utils::completion_list, CompletionKind}; | 51 | use crate::{test_utils::completion_list, CompletionKind}; |
52 | 52 | ||
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index 86c143b63..81691cd7f 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -1,8 +1,8 @@ | |||
1 | //! Completion of names from the current scope, e.g. locals and imported items. | 1 | //! Completion of names from the current scope, e.g. locals and imported items. |
2 | 2 | ||
3 | use assists::utils::ImportScope; | ||
4 | use either::Either; | 3 | use either::Either; |
5 | use hir::{Adt, ModuleDef, ScopeDef, Type}; | 4 | use hir::{Adt, ModuleDef, ScopeDef, Type}; |
5 | use ide_db::helpers::insert_use::ImportScope; | ||
6 | use ide_db::imports_locator; | 6 | use ide_db::imports_locator; |
7 | use syntax::AstNode; | 7 | use syntax::AstNode; |
8 | use test_utils::mark; | 8 | use test_utils::mark; |
@@ -44,7 +44,9 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC | |||
44 | acc.add_resolution(ctx, name.to_string(), &res) | 44 | acc.add_resolution(ctx, name.to_string(), &res) |
45 | }); | 45 | }); |
46 | 46 | ||
47 | fuzzy_completion(acc, ctx).unwrap_or_default() | 47 | if ctx.config.enable_experimental_completions { |
48 | fuzzy_completion(acc, ctx).unwrap_or_default() | ||
49 | } | ||
48 | } | 50 | } |
49 | 51 | ||
50 | fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { | 52 | fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &Type) { |
@@ -79,32 +81,34 @@ fn fuzzy_completion(acc: &mut Completions, ctx: &CompletionContext) -> Option<() | |||
79 | 81 | ||
80 | let potential_import_name = ctx.token.to_string(); | 82 | let potential_import_name = ctx.token.to_string(); |
81 | 83 | ||
82 | let possible_imports = | 84 | let possible_imports = imports_locator::find_similar_imports( |
83 | imports_locator::find_similar_imports(&ctx.sema, ctx.krate?, &potential_import_name, 400) | 85 | &ctx.sema, |
84 | .filter_map(|import_candidate| match import_candidate { | 86 | ctx.krate?, |
85 | // when completing outside the use declaration, modules are pretty useless | 87 | &potential_import_name, |
86 | // and tend to bloat the completion suggestions a lot | 88 | 50, |
87 | Either::Left(ModuleDef::Module(_)) => None, | 89 | true, |
88 | Either::Left(module_def) => Some(( | 90 | ) |
89 | current_module.find_use_path(ctx.db, module_def)?, | 91 | .filter_map(|import_candidate| { |
90 | ScopeDef::ModuleDef(module_def), | 92 | Some(match import_candidate { |
91 | )), | 93 | Either::Left(module_def) => { |
92 | Either::Right(macro_def) => Some(( | 94 | (current_module.find_use_path(ctx.db, module_def)?, ScopeDef::ModuleDef(module_def)) |
93 | current_module.find_use_path(ctx.db, macro_def)?, | 95 | } |
94 | ScopeDef::MacroDef(macro_def), | 96 | Either::Right(macro_def) => { |
95 | )), | 97 | (current_module.find_use_path(ctx.db, macro_def)?, ScopeDef::MacroDef(macro_def)) |
96 | }) | 98 | } |
97 | .filter(|(mod_path, _)| mod_path.len() > 1) | 99 | }) |
98 | .filter_map(|(import_path, definition)| { | 100 | }) |
99 | render_resolution_with_import( | 101 | .filter(|(mod_path, _)| mod_path.len() > 1) |
100 | RenderContext::new(ctx), | 102 | .take(20) |
101 | import_path.clone(), | 103 | .filter_map(|(import_path, definition)| { |
102 | import_scope.clone(), | 104 | render_resolution_with_import( |
103 | ctx.config.merge, | 105 | RenderContext::new(ctx), |
104 | &definition, | 106 | import_path.clone(), |
105 | ) | 107 | import_scope.clone(), |
106 | }) | 108 | ctx.config.merge, |
107 | .take(20); | 109 | &definition, |
110 | ) | ||
111 | }); | ||
108 | 112 | ||
109 | acc.add_all(possible_imports); | 113 | acc.add_all(possible_imports); |
110 | Some(()) | 114 | Some(()) |
diff --git a/crates/completion/src/config.rs b/crates/completion/src/config.rs
index 82874ff25..654a76f7b 100644
--- a/crates/completion/src/config.rs
+++ b/crates/completion/src/config.rs
@@ -4,11 +4,12 @@ | |||
4 | //! module, and we use to statically check that we only produce snippet | 4 | //! module, and we use to statically check that we only produce snippet |
5 | //! completions if we are allowed to. | 5 | //! completions if we are allowed to. |
6 | 6 | ||
7 | use assists::utils::MergeBehaviour; | 7 | use ide_db::helpers::insert_use::MergeBehaviour; |
8 | 8 | ||
9 | #[derive(Clone, Debug, PartialEq, Eq)] | 9 | #[derive(Clone, Debug, PartialEq, Eq)] |
10 | pub struct CompletionConfig { | 10 | pub struct CompletionConfig { |
11 | pub enable_postfix_completions: bool, | 11 | pub enable_postfix_completions: bool, |
12 | pub enable_experimental_completions: bool, | ||
12 | pub add_call_parenthesis: bool, | 13 | pub add_call_parenthesis: bool, |
13 | pub add_call_argument_snippets: bool, | 14 | pub add_call_argument_snippets: bool, |
14 | pub snippet_cap: Option<SnippetCap>, | 15 | pub snippet_cap: Option<SnippetCap>, |
@@ -30,6 +31,7 @@ impl Default for CompletionConfig { | |||
30 | fn default() -> Self { | 31 | fn default() -> Self { |
31 | CompletionConfig { | 32 | CompletionConfig { |
32 | enable_postfix_completions: true, | 33 | enable_postfix_completions: true, |
34 | enable_experimental_completions: true, | ||
33 | add_call_parenthesis: true, | 35 | add_call_parenthesis: true, |
34 | add_call_argument_snippets: true, | 36 | add_call_argument_snippets: true, |
35 | snippet_cap: Some(SnippetCap { _private: () }), | 37 | snippet_cap: Some(SnippetCap { _private: () }), |
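A small sketch (not from the commit) of how a client might opt out of the new flag; it assumes `CompletionConfig` remains re-exported from the crate root as it is elsewhere in the workspace.

use completion::CompletionConfig;

fn completions_without_experimental() -> CompletionConfig {
    // `enable_experimental_completions` defaults to `true` in the Default impl above;
    // turning it off skips the fuzzy auto-import completions gated in
    // `complete_unqualified_path`.
    CompletionConfig { enable_experimental_completions: false, ..CompletionConfig::default() }
}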
diff --git a/crates/completion/src/context.rs b/crates/completion/src/context.rs
index bf70ee478..5cd11cf77 100644
--- a/crates/completion/src/context.rs
+++ b/crates/completion/src/context.rs
@@ -51,6 +51,7 @@ pub(crate) struct CompletionContext<'a> { | |||
51 | /// If a name-binding or reference to a const in a pattern. | 51 | /// If a name-binding or reference to a const in a pattern. |
52 | /// Irrefutable patterns (like let) are excluded. | 52 | /// Irrefutable patterns (like let) are excluded. |
53 | pub(super) is_pat_binding_or_const: bool, | 53 | pub(super) is_pat_binding_or_const: bool, |
54 | pub(super) is_irrefutable_let_pat_binding: bool, | ||
54 | /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path. | 55 | /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path. |
55 | pub(super) is_trivial_path: bool, | 56 | pub(super) is_trivial_path: bool, |
56 | /// If not a trivial path, the prefix (qualifier). | 57 | /// If not a trivial path, the prefix (qualifier). |
@@ -146,6 +147,7 @@ impl<'a> CompletionContext<'a> { | |||
146 | active_parameter: ActiveParameter::at(db, position), | 147 | active_parameter: ActiveParameter::at(db, position), |
147 | is_param: false, | 148 | is_param: false, |
148 | is_pat_binding_or_const: false, | 149 | is_pat_binding_or_const: false, |
150 | is_irrefutable_let_pat_binding: false, | ||
149 | is_trivial_path: false, | 151 | is_trivial_path: false, |
150 | path_qual: None, | 152 | path_qual: None, |
151 | after_if: false, | 153 | after_if: false, |
@@ -330,6 +332,7 @@ impl<'a> CompletionContext<'a> { | |||
330 | if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) | 332 | if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) |
331 | { | 333 | { |
332 | self.is_pat_binding_or_const = false; | 334 | self.is_pat_binding_or_const = false; |
335 | self.is_irrefutable_let_pat_binding = true; | ||
333 | } | 336 | } |
334 | } | 337 | } |
335 | } | 338 | } |
diff --git a/crates/completion/src/item.rs b/crates/completion/src/item.rs
index b13c3f376..e85549fef 100644
--- a/crates/completion/src/item.rs
+++ b/crates/completion/src/item.rs
@@ -2,8 +2,11 @@ | |||
2 | 2 | ||
3 | use std::fmt; | 3 | use std::fmt; |
4 | 4 | ||
5 | use assists::utils::{insert_use, mod_path_to_ast, ImportScope, MergeBehaviour}; | ||
6 | use hir::{Documentation, ModPath, Mutability}; | 5 | use hir::{Documentation, ModPath, Mutability}; |
6 | use ide_db::helpers::{ | ||
7 | insert_use::{self, ImportScope, MergeBehaviour}, | ||
8 | mod_path_to_ast, | ||
9 | }; | ||
7 | use syntax::{algo, TextRange}; | 10 | use syntax::{algo, TextRange}; |
8 | use text_edit::TextEdit; | 11 | use text_edit::TextEdit; |
9 | 12 | ||
@@ -201,7 +204,7 @@ impl CompletionItem { | |||
201 | trigger_call_info: None, | 204 | trigger_call_info: None, |
202 | score: None, | 205 | score: None, |
203 | ref_match: None, | 206 | ref_match: None, |
204 | import_data: None, | 207 | import_to_add: None, |
205 | } | 208 | } |
206 | } | 209 | } |
207 | 210 | ||
@@ -255,13 +258,21 @@ impl CompletionItem { | |||
255 | } | 258 | } |
256 | } | 259 | } |
257 | 260 | ||
261 | /// An extra import to add after the completion is applied. | ||
262 | #[derive(Clone)] | ||
263 | pub(crate) struct ImportToAdd { | ||
264 | pub(crate) import_path: ModPath, | ||
265 | pub(crate) import_scope: ImportScope, | ||
266 | pub(crate) merge_behaviour: Option<MergeBehaviour>, | ||
267 | } | ||
268 | |||
258 | /// A helper to make `CompletionItem`s. | 269 | /// A helper to make `CompletionItem`s. |
259 | #[must_use] | 270 | #[must_use] |
260 | #[derive(Clone)] | 271 | #[derive(Clone)] |
261 | pub(crate) struct Builder { | 272 | pub(crate) struct Builder { |
262 | source_range: TextRange, | 273 | source_range: TextRange, |
263 | completion_kind: CompletionKind, | 274 | completion_kind: CompletionKind, |
264 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | 275 | import_to_add: Option<ImportToAdd>, |
265 | label: String, | 276 | label: String, |
266 | insert_text: Option<String>, | 277 | insert_text: Option<String>, |
267 | insert_text_format: InsertTextFormat, | 278 | insert_text_format: InsertTextFormat, |
@@ -278,14 +289,16 @@ pub(crate) struct Builder { | |||
278 | 289 | ||
279 | impl Builder { | 290 | impl Builder { |
280 | pub(crate) fn build(self) -> CompletionItem { | 291 | pub(crate) fn build(self) -> CompletionItem { |
292 | let _p = profile::span("item::Builder::build"); | ||
293 | |||
281 | let mut label = self.label; | 294 | let mut label = self.label; |
282 | let mut lookup = self.lookup; | 295 | let mut lookup = self.lookup; |
283 | let mut insert_text = self.insert_text; | 296 | let mut insert_text = self.insert_text; |
284 | let mut text_edits = TextEdit::builder(); | 297 | let mut text_edits = TextEdit::builder(); |
285 | 298 | ||
286 | if let Some((import_path, import_scope, merge_behaviour)) = self.import_data { | 299 | if let Some(import_data) = self.import_to_add { |
287 | let import = mod_path_to_ast(&import_path); | 300 | let import = mod_path_to_ast(&import_data.import_path); |
288 | let mut import_path_without_last_segment = import_path; | 301 | let mut import_path_without_last_segment = import_data.import_path; |
289 | let _ = import_path_without_last_segment.segments.pop(); | 302 | let _ = import_path_without_last_segment.segments.pop(); |
290 | 303 | ||
291 | if !import_path_without_last_segment.segments.is_empty() { | 304 | if !import_path_without_last_segment.segments.is_empty() { |
@@ -298,7 +311,11 @@ impl Builder { | |||
298 | label = format!("{}::{}", import_path_without_last_segment, label); | 311 | label = format!("{}::{}", import_path_without_last_segment, label); |
299 | } | 312 | } |
300 | 313 | ||
301 | let rewriter = insert_use(&import_scope, import, merge_behaviour); | 314 | let rewriter = insert_use::insert_use( |
315 | &import_data.import_scope, | ||
316 | import, | ||
317 | import_data.merge_behaviour, | ||
318 | ); | ||
302 | if let Some(old_ast) = rewriter.rewrite_root() { | 319 | if let Some(old_ast) = rewriter.rewrite_root() { |
303 | algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut text_edits); | 320 | algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut text_edits); |
304 | } | 321 | } |
@@ -390,11 +407,8 @@ impl Builder { | |||
390 | self.trigger_call_info = Some(true); | 407 | self.trigger_call_info = Some(true); |
391 | self | 408 | self |
392 | } | 409 | } |
393 | pub(crate) fn import_data( | 410 | pub(crate) fn add_import(mut self, import_to_add: Option<ImportToAdd>) -> Builder { |
394 | mut self, | 411 | self.import_to_add = import_to_add; |
395 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | ||
396 | ) -> Builder { | ||
397 | self.import_data = import_data; | ||
398 | self | 412 | self |
399 | } | 413 | } |
400 | pub(crate) fn set_ref_match( | 414 | pub(crate) fn set_ref_match( |
diff --git a/crates/completion/src/lib.rs b/crates/completion/src/lib.rs
index ac57683fb..1ec2e9be7 100644
--- a/crates/completion/src/lib.rs
+++ b/crates/completion/src/lib.rs
@@ -69,6 +69,13 @@ pub use crate::{ | |||
69 | // fn test_name() {} | 69 | // fn test_name() {} |
70 | // } | 70 | // } |
71 | // ``` | 71 | // ``` |
72 | // | ||
73 | // And experimental completions, enabled with the `rust-analyzer.completion.enableExperimental` setting. | ||
74 | // This flag enables or disables: | ||
75 | // | ||
76 | // - Auto import: additional completion options with automatic `use` import and options from all project importable items, matched for the input | ||
77 | // | ||
78 | // Experimental completions might cause issues with performance and completion list look. | ||
72 | 79 | ||
73 | /// Main entry point for completion. We run completion as a two-phase process. | 80 | /// Main entry point for completion. We run completion as a two-phase process. |
74 | /// | 81 | /// |
diff --git a/crates/completion/src/render.rs b/crates/completion/src/render.rs
index e892d4de8..504757a6a 100644
--- a/crates/completion/src/render.rs
+++ b/crates/completion/src/render.rs
@@ -9,15 +9,15 @@ pub(crate) mod type_alias; | |||
9 | 9 | ||
10 | mod builder_ext; | 10 | mod builder_ext; |
11 | 11 | ||
12 | use assists::utils::{ImportScope, MergeBehaviour}; | ||
13 | use hir::{Documentation, HasAttrs, HirDisplay, ModPath, Mutability, ScopeDef, Type}; | 12 | use hir::{Documentation, HasAttrs, HirDisplay, ModPath, Mutability, ScopeDef, Type}; |
13 | use ide_db::helpers::insert_use::{ImportScope, MergeBehaviour}; | ||
14 | use ide_db::RootDatabase; | 14 | use ide_db::RootDatabase; |
15 | use syntax::TextRange; | 15 | use syntax::TextRange; |
16 | use test_utils::mark; | 16 | use test_utils::mark; |
17 | 17 | ||
18 | use crate::{ | 18 | use crate::{ |
19 | config::SnippetCap, CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, | 19 | config::SnippetCap, item::ImportToAdd, CompletionContext, CompletionItem, CompletionItemKind, |
20 | CompletionScore, | 20 | CompletionKind, CompletionScore, |
21 | }; | 21 | }; |
22 | 22 | ||
23 | use crate::render::{enum_variant::render_enum_variant, function::render_fn, macro_::render_macro}; | 23 | use crate::render::{enum_variant::render_enum_variant, function::render_fn, macro_::render_macro}; |
@@ -48,15 +48,15 @@ pub(crate) fn render_resolution<'a>( | |||
48 | 48 | ||
49 | pub(crate) fn render_resolution_with_import<'a>( | 49 | pub(crate) fn render_resolution_with_import<'a>( |
50 | ctx: RenderContext<'a>, | 50 | ctx: RenderContext<'a>, |
51 | import: ModPath, | 51 | import_path: ModPath, |
52 | import_scope: ImportScope, | 52 | import_scope: ImportScope, |
53 | merge_behaviour: Option<MergeBehaviour>, | 53 | merge_behaviour: Option<MergeBehaviour>, |
54 | resolution: &ScopeDef, | 54 | resolution: &ScopeDef, |
55 | ) -> Option<CompletionItem> { | 55 | ) -> Option<CompletionItem> { |
56 | let local_name = import.segments.last()?.to_string(); | 56 | let local_name = import_path.segments.last()?.to_string(); |
57 | Render::new(ctx).render_resolution( | 57 | Render::new(ctx).render_resolution( |
58 | local_name, | 58 | local_name, |
59 | Some((import, import_scope, merge_behaviour)), | 59 | Some(ImportToAdd { import_path, import_scope, merge_behaviour }), |
60 | resolution, | 60 | resolution, |
61 | ) | 61 | ) |
62 | } | 62 | } |
@@ -147,9 +147,10 @@ impl<'a> Render<'a> { | |||
147 | fn render_resolution( | 147 | fn render_resolution( |
148 | self, | 148 | self, |
149 | local_name: String, | 149 | local_name: String, |
150 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | 150 | import_to_add: Option<ImportToAdd>, |
151 | resolution: &ScopeDef, | 151 | resolution: &ScopeDef, |
152 | ) -> Option<CompletionItem> { | 152 | ) -> Option<CompletionItem> { |
153 | let _p = profile::span("render_resolution"); | ||
153 | use hir::ModuleDef::*; | 154 | use hir::ModuleDef::*; |
154 | 155 | ||
155 | let completion_kind = match resolution { | 156 | let completion_kind = match resolution { |
@@ -159,15 +160,16 @@ impl<'a> Render<'a> { | |||
159 | 160 | ||
160 | let kind = match resolution { | 161 | let kind = match resolution { |
161 | ScopeDef::ModuleDef(Function(func)) => { | 162 | ScopeDef::ModuleDef(Function(func)) => { |
162 | let item = render_fn(self.ctx, import_data, Some(local_name), *func); | 163 | let item = render_fn(self.ctx, import_to_add, Some(local_name), *func); |
163 | return Some(item); | 164 | return Some(item); |
164 | } | 165 | } |
165 | ScopeDef::ModuleDef(EnumVariant(var)) => { | 166 | ScopeDef::ModuleDef(EnumVariant(var)) => { |
166 | let item = render_enum_variant(self.ctx, import_data, Some(local_name), *var, None); | 167 | let item = |
168 | render_enum_variant(self.ctx, import_to_add, Some(local_name), *var, None); | ||
167 | return Some(item); | 169 | return Some(item); |
168 | } | 170 | } |
169 | ScopeDef::MacroDef(mac) => { | 171 | ScopeDef::MacroDef(mac) => { |
170 | let item = render_macro(self.ctx, import_data, local_name, *mac); | 172 | let item = render_macro(self.ctx, import_to_add, local_name, *mac); |
171 | return item; | 173 | return item; |
172 | } | 174 | } |
173 | 175 | ||
@@ -192,7 +194,7 @@ impl<'a> Render<'a> { | |||
192 | local_name, | 194 | local_name, |
193 | ) | 195 | ) |
194 | .kind(CompletionItemKind::UnresolvedReference) | 196 | .kind(CompletionItemKind::UnresolvedReference) |
195 | .import_data(import_data) | 197 | .add_import(import_to_add) |
196 | .build(); | 198 | .build(); |
197 | return Some(item); | 199 | return Some(item); |
198 | } | 200 | } |
@@ -247,7 +249,7 @@ impl<'a> Render<'a> { | |||
247 | 249 | ||
248 | let item = item | 250 | let item = item |
249 | .kind(kind) | 251 | .kind(kind) |
250 | .import_data(import_data) | 252 | .add_import(import_to_add) |
251 | .set_documentation(docs) | 253 | .set_documentation(docs) |
252 | .set_ref_match(ref_match) | 254 | .set_ref_match(ref_match) |
253 | .build(); | 255 | .build(); |
diff --git a/crates/completion/src/render/enum_variant.rs b/crates/completion/src/render/enum_variant.rs
index 6070e9b1d..f4bd02f25 100644
--- a/crates/completion/src/render/enum_variant.rs
+++ b/crates/completion/src/render/enum_variant.rs
@@ -1,23 +1,23 @@ | |||
1 | //! Renderer for `enum` variants. | 1 | //! Renderer for `enum` variants. |
2 | 2 | ||
3 | use assists::utils::{ImportScope, MergeBehaviour}; | ||
4 | use hir::{HasAttrs, HirDisplay, ModPath, StructKind}; | 3 | use hir::{HasAttrs, HirDisplay, ModPath, StructKind}; |
5 | use itertools::Itertools; | 4 | use itertools::Itertools; |
6 | use test_utils::mark; | 5 | use test_utils::mark; |
7 | 6 | ||
8 | use crate::{ | 7 | use crate::{ |
9 | item::{CompletionItem, CompletionItemKind, CompletionKind}, | 8 | item::{CompletionItem, CompletionItemKind, CompletionKind, ImportToAdd}, |
10 | render::{builder_ext::Params, RenderContext}, | 9 | render::{builder_ext::Params, RenderContext}, |
11 | }; | 10 | }; |
12 | 11 | ||
13 | pub(crate) fn render_enum_variant<'a>( | 12 | pub(crate) fn render_enum_variant<'a>( |
14 | ctx: RenderContext<'a>, | 13 | ctx: RenderContext<'a>, |
15 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | 14 | import_to_add: Option<ImportToAdd>, |
16 | local_name: Option<String>, | 15 | local_name: Option<String>, |
17 | variant: hir::EnumVariant, | 16 | variant: hir::EnumVariant, |
18 | path: Option<ModPath>, | 17 | path: Option<ModPath>, |
19 | ) -> CompletionItem { | 18 | ) -> CompletionItem { |
20 | EnumVariantRender::new(ctx, local_name, variant, path).render(import_data) | 19 | let _p = profile::span("render_enum_variant"); |
20 | EnumVariantRender::new(ctx, local_name, variant, path).render(import_to_add) | ||
21 | } | 21 | } |
22 | 22 | ||
23 | #[derive(Debug)] | 23 | #[derive(Debug)] |
@@ -62,10 +62,7 @@ impl<'a> EnumVariantRender<'a> { | |||
62 | } | 62 | } |
63 | } | 63 | } |
64 | 64 | ||
65 | fn render( | 65 | fn render(self, import_to_add: Option<ImportToAdd>) -> CompletionItem { |
66 | self, | ||
67 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | ||
68 | ) -> CompletionItem { | ||
69 | let mut builder = CompletionItem::new( | 66 | let mut builder = CompletionItem::new( |
70 | CompletionKind::Reference, | 67 | CompletionKind::Reference, |
71 | self.ctx.source_range(), | 68 | self.ctx.source_range(), |
@@ -74,7 +71,7 @@ impl<'a> EnumVariantRender<'a> { | |||
74 | .kind(CompletionItemKind::EnumVariant) | 71 | .kind(CompletionItemKind::EnumVariant) |
75 | .set_documentation(self.variant.docs(self.ctx.db())) | 72 | .set_documentation(self.variant.docs(self.ctx.db())) |
76 | .set_deprecated(self.ctx.is_deprecated(self.variant)) | 73 | .set_deprecated(self.ctx.is_deprecated(self.variant)) |
77 | .import_data(import_data) | 74 | .add_import(import_to_add) |
78 | .detail(self.detail()); | 75 | .detail(self.detail()); |
79 | 76 | ||
80 | if self.variant_kind == StructKind::Tuple { | 77 | if self.variant_kind == StructKind::Tuple { |
diff --git a/crates/completion/src/render/function.rs b/crates/completion/src/render/function.rs
index 9dd5cd18c..542383d7e 100644
--- a/crates/completion/src/render/function.rs
+++ b/crates/completion/src/render/function.rs
@@ -1,21 +1,21 @@ | |||
1 | //! Renderer for function calls. | 1 | //! Renderer for function calls. |
2 | 2 | ||
3 | use assists::utils::{ImportScope, MergeBehaviour}; | 3 | use hir::{HasSource, Type}; |
4 | use hir::{HasSource, ModPath, Type}; | ||
5 | use syntax::{ast::Fn, display::function_declaration}; | 4 | use syntax::{ast::Fn, display::function_declaration}; |
6 | 5 | ||
7 | use crate::{ | 6 | use crate::{ |
8 | item::{CompletionItem, CompletionItemKind, CompletionKind}, | 7 | item::{CompletionItem, CompletionItemKind, CompletionKind, ImportToAdd}, |
9 | render::{builder_ext::Params, RenderContext}, | 8 | render::{builder_ext::Params, RenderContext}, |
10 | }; | 9 | }; |
11 | 10 | ||
12 | pub(crate) fn render_fn<'a>( | 11 | pub(crate) fn render_fn<'a>( |
13 | ctx: RenderContext<'a>, | 12 | ctx: RenderContext<'a>, |
14 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | 13 | import_to_add: Option<ImportToAdd>, |
15 | local_name: Option<String>, | 14 | local_name: Option<String>, |
16 | fn_: hir::Function, | 15 | fn_: hir::Function, |
17 | ) -> CompletionItem { | 16 | ) -> CompletionItem { |
18 | FunctionRender::new(ctx, local_name, fn_).render(import_data) | 17 | let _p = profile::span("render_fn"); |
18 | FunctionRender::new(ctx, local_name, fn_).render(import_to_add) | ||
19 | } | 19 | } |
20 | 20 | ||
21 | #[derive(Debug)] | 21 | #[derive(Debug)] |
@@ -38,10 +38,7 @@ impl<'a> FunctionRender<'a> { | |||
38 | FunctionRender { ctx, name, fn_, ast_node } | 38 | FunctionRender { ctx, name, fn_, ast_node } |
39 | } | 39 | } |
40 | 40 | ||
41 | fn render( | 41 | fn render(self, import_to_add: Option<ImportToAdd>) -> CompletionItem { |
42 | self, | ||
43 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | ||
44 | ) -> CompletionItem { | ||
45 | let params = self.params(); | 42 | let params = self.params(); |
46 | CompletionItem::new(CompletionKind::Reference, self.ctx.source_range(), self.name.clone()) | 43 | CompletionItem::new(CompletionKind::Reference, self.ctx.source_range(), self.name.clone()) |
47 | .kind(self.kind()) | 44 | .kind(self.kind()) |
@@ -49,7 +46,7 @@ impl<'a> FunctionRender<'a> { | |||
49 | .set_deprecated(self.ctx.is_deprecated(self.fn_)) | 46 | .set_deprecated(self.ctx.is_deprecated(self.fn_)) |
50 | .detail(self.detail()) | 47 | .detail(self.detail()) |
51 | .add_call_parens(self.ctx.completion, self.name, params) | 48 | .add_call_parens(self.ctx.completion, self.name, params) |
52 | .import_data(import_data) | 49 | .add_import(import_to_add) |
53 | .build() | 50 | .build() |
54 | } | 51 | } |
55 | 52 | ||
diff --git a/crates/completion/src/render/macro_.rs b/crates/completion/src/render/macro_.rs index fead59e41..b4ab32c6e 100644 --- a/crates/completion/src/render/macro_.rs +++ b/crates/completion/src/render/macro_.rs | |||
@@ -1,22 +1,22 @@ | |||
1 | //! Renderer for macro invocations. | 1 | //! Renderer for macro invocations. |
2 | 2 | ||
3 | use assists::utils::{ImportScope, MergeBehaviour}; | 3 | use hir::{Documentation, HasSource}; |
4 | use hir::{Documentation, HasSource, ModPath}; | ||
5 | use syntax::display::macro_label; | 4 | use syntax::display::macro_label; |
6 | use test_utils::mark; | 5 | use test_utils::mark; |
7 | 6 | ||
8 | use crate::{ | 7 | use crate::{ |
9 | item::{CompletionItem, CompletionItemKind, CompletionKind}, | 8 | item::{CompletionItem, CompletionItemKind, CompletionKind, ImportToAdd}, |
10 | render::RenderContext, | 9 | render::RenderContext, |
11 | }; | 10 | }; |
12 | 11 | ||
13 | pub(crate) fn render_macro<'a>( | 12 | pub(crate) fn render_macro<'a>( |
14 | ctx: RenderContext<'a>, | 13 | ctx: RenderContext<'a>, |
15 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | 14 | import_to_add: Option<ImportToAdd>, |
16 | name: String, | 15 | name: String, |
17 | macro_: hir::MacroDef, | 16 | macro_: hir::MacroDef, |
18 | ) -> Option<CompletionItem> { | 17 | ) -> Option<CompletionItem> { |
19 | MacroRender::new(ctx, name, macro_).render(import_data) | 18 | let _p = profile::span("render_macro"); |
19 | MacroRender::new(ctx, name, macro_).render(import_to_add) | ||
20 | } | 20 | } |
21 | 21 | ||
22 | #[derive(Debug)] | 22 | #[derive(Debug)] |
@@ -38,10 +38,7 @@ impl<'a> MacroRender<'a> { | |||
38 | MacroRender { ctx, name, macro_, docs, bra, ket } | 38 | MacroRender { ctx, name, macro_, docs, bra, ket } |
39 | } | 39 | } |
40 | 40 | ||
41 | fn render( | 41 | fn render(&self, import_to_add: Option<ImportToAdd>) -> Option<CompletionItem> { |
42 | &self, | ||
43 | import_data: Option<(ModPath, ImportScope, Option<MergeBehaviour>)>, | ||
44 | ) -> Option<CompletionItem> { | ||
45 | // FIXME: Currently proc-macro do not have ast-node, | 42 | // FIXME: Currently proc-macro do not have ast-node, |
46 | // such that it does not have source | 43 | // such that it does not have source |
47 | if self.macro_.is_proc_macro() { | 44 | if self.macro_.is_proc_macro() { |
@@ -53,7 +50,7 @@ impl<'a> MacroRender<'a> { | |||
53 | .kind(CompletionItemKind::Macro) | 50 | .kind(CompletionItemKind::Macro) |
54 | .set_documentation(self.docs.clone()) | 51 | .set_documentation(self.docs.clone()) |
55 | .set_deprecated(self.ctx.is_deprecated(self.macro_)) | 52 | .set_deprecated(self.ctx.is_deprecated(self.macro_)) |
56 | .import_data(import_data) | 53 | .add_import(import_to_add) |
57 | .detail(self.detail()); | 54 | .detail(self.detail()); |
58 | 55 | ||
59 | let needs_bang = self.needs_bang(); | 56 | let needs_bang = self.needs_bang(); |
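All three renderers above now receive a single `ImportToAdd` value instead of the former `(ModPath, ImportScope, Option<MergeBehaviour>)` tuple, and each entry point opens a `profile::span` so completion rendering shows up in profiling output. A minimal sketch of such a wrapper follows, with stand-in types, since the real `ModPath`, `ImportScope` and `MergeBehaviour` live in `hir`/`ide_db` and the actual field names of `completion::item::ImportToAdd` are not shown in this diff:

    // Stand-in types; only the bundling into one named struct is the point.
    #[derive(Debug, Clone)]
    struct ModPath(Vec<String>);
    #[derive(Debug, Clone)]
    struct ImportScope;
    #[derive(Debug, Clone, Copy)]
    enum MergeBehaviour { Full, Last }

    // Hypothetical shape: one named value threaded through every renderer
    // instead of an anonymous 3-tuple.
    #[derive(Debug, Clone)]
    struct ImportToAdd {
        import_path: ModPath,
        import_scope: ImportScope,
        merge_behaviour: Option<MergeBehaviour>,
    }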
diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 44499bc79..3d9436d69 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml | |||
@@ -12,7 +12,7 @@ doctest = false | |||
12 | [dependencies] | 12 | [dependencies] |
13 | crossbeam-channel = "0.5.0" | 13 | crossbeam-channel = "0.5.0" |
14 | log = "0.4.8" | 14 | log = "0.4.8" |
15 | cargo_metadata = "0.12.0" | 15 | cargo_metadata = "=0.12.0" |
16 | serde_json = "1.0.48" | 16 | serde_json = "1.0.48" |
17 | jod-thread = "0.1.1" | 17 | jod-thread = "0.1.1" |
18 | 18 | ||
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs index 37ed092ad..f06b5cd9f 100644 --- a/crates/hir/src/code_model.rs +++ b/crates/hir/src/code_model.rs | |||
@@ -1420,11 +1420,11 @@ impl Type { | |||
1420 | pub fn normalize_trait_assoc_type( | 1420 | pub fn normalize_trait_assoc_type( |
1421 | &self, | 1421 | &self, |
1422 | db: &dyn HirDatabase, | 1422 | db: &dyn HirDatabase, |
1423 | r#trait: Trait, | 1423 | trait_: Trait, |
1424 | args: &[Type], | 1424 | args: &[Type], |
1425 | alias: TypeAlias, | 1425 | alias: TypeAlias, |
1426 | ) -> Option<Type> { | 1426 | ) -> Option<Type> { |
1427 | let subst = Substs::build_for_def(db, r#trait.id) | 1427 | let subst = Substs::build_for_def(db, trait_.id) |
1428 | .push(self.ty.value.clone()) | 1428 | .push(self.ty.value.clone()) |
1429 | .fill(args.iter().map(|t| t.ty.value.clone())) | 1429 | .fill(args.iter().map(|t| t.ty.value.clone())) |
1430 | .build(); | 1430 | .build(); |
diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index 07333c453..8c767b249 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs | |||
@@ -11,7 +11,7 @@ pub use hir_def::db::{ | |||
11 | }; | 11 | }; |
12 | pub use hir_expand::db::{ | 12 | pub use hir_expand::db::{ |
13 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery, | 13 | AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery, |
14 | MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroQuery, | 14 | MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery, |
15 | }; | 15 | }; |
16 | pub use hir_ty::db::*; | 16 | pub use hir_ty::db::*; |
17 | 17 | ||
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index d9ad8db6f..eaf1a14ec 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | pub use hir_def::diagnostics::{InactiveCode, UnresolvedModule}; | 2 | pub use hir_def::diagnostics::{InactiveCode, UnresolvedModule, UnresolvedProcMacro}; |
3 | pub use hir_expand::diagnostics::{ | 3 | pub use hir_expand::diagnostics::{ |
4 | Diagnostic, DiagnosticCode, DiagnosticSink, DiagnosticSinkBuilder, | 4 | Diagnostic, DiagnosticCode, DiagnosticSink, DiagnosticSinkBuilder, |
5 | }; | 5 | }; |
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 5fea25ef1..93bdb4472 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs | |||
@@ -57,8 +57,8 @@ pub use hir_def::{ | |||
57 | visibility::Visibility, | 57 | visibility::Visibility, |
58 | }; | 58 | }; |
59 | pub use hir_expand::{ | 59 | pub use hir_expand::{ |
60 | name::known, name::AsName, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, | 60 | name::known, name::AsName, name::Name, ExpandResult, HirFileId, InFile, MacroCallId, |
61 | /* FIXME */ MacroDefId, MacroFile, Origin, | 61 | MacroCallLoc, /* FIXME */ MacroDefId, MacroFile, Origin, |
62 | }; | 62 | }; |
63 | pub use hir_ty::display::HirDisplay; | 63 | pub use hir_ty::display::HirDisplay; |
64 | 64 | ||
diff --git a/crates/hir_def/src/diagnostics.rs b/crates/hir_def/src/diagnostics.rs index b221b290c..c71266dc0 100644 --- a/crates/hir_def/src/diagnostics.rs +++ b/crates/hir_def/src/diagnostics.rs | |||
@@ -6,7 +6,7 @@ use stdx::format_to; | |||
6 | use cfg::{CfgExpr, CfgOptions, DnfExpr}; | 6 | use cfg::{CfgExpr, CfgOptions, DnfExpr}; |
7 | use hir_expand::diagnostics::{Diagnostic, DiagnosticCode, DiagnosticSink}; | 7 | use hir_expand::diagnostics::{Diagnostic, DiagnosticCode, DiagnosticSink}; |
8 | use hir_expand::{HirFileId, InFile}; | 8 | use hir_expand::{HirFileId, InFile}; |
9 | use syntax::{ast, AstPtr, SyntaxNodePtr}; | 9 | use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; |
10 | 10 | ||
11 | use crate::{db::DefDatabase, DefWithBodyId}; | 11 | use crate::{db::DefDatabase, DefWithBodyId}; |
12 | 12 | ||
@@ -127,3 +127,68 @@ impl Diagnostic for InactiveCode { | |||
127 | self | 127 | self |
128 | } | 128 | } |
129 | } | 129 | } |
130 | |||
131 | // Diagnostic: unresolved-proc-macro | ||
132 | // | ||
133 | // This diagnostic is shown when a procedural macro can not be found. This usually means that | ||
134 | // procedural macro support is simply disabled (and hence is only a weak hint instead of an error), | ||
135 | // but can also indicate project setup problems. | ||
136 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
137 | pub struct UnresolvedProcMacro { | ||
138 | pub file: HirFileId, | ||
139 | pub node: SyntaxNodePtr, | ||
140 | /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange` | ||
141 | /// to use instead. | ||
142 | pub precise_location: Option<TextRange>, | ||
143 | pub macro_name: Option<String>, | ||
144 | } | ||
145 | |||
146 | impl Diagnostic for UnresolvedProcMacro { | ||
147 | fn code(&self) -> DiagnosticCode { | ||
148 | DiagnosticCode("unresolved-proc-macro") | ||
149 | } | ||
150 | |||
151 | fn message(&self) -> String { | ||
152 | match &self.macro_name { | ||
153 | Some(name) => format!("proc macro `{}` not expanded", name), | ||
154 | None => "proc macro not expanded".to_string(), | ||
155 | } | ||
156 | } | ||
157 | |||
158 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
159 | InFile::new(self.file, self.node.clone()) | ||
160 | } | ||
161 | |||
162 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
163 | self | ||
164 | } | ||
165 | } | ||
166 | |||
167 | // Diagnostic: macro-error | ||
168 | // | ||
169 | // This diagnostic is shown for macro expansion errors. | ||
170 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
171 | pub struct MacroError { | ||
172 | pub file: HirFileId, | ||
173 | pub node: SyntaxNodePtr, | ||
174 | pub message: String, | ||
175 | } | ||
176 | |||
177 | impl Diagnostic for MacroError { | ||
178 | fn code(&self) -> DiagnosticCode { | ||
179 | DiagnosticCode("macro-error") | ||
180 | } | ||
181 | fn message(&self) -> String { | ||
182 | self.message.clone() | ||
183 | } | ||
184 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
185 | InFile::new(self.file, self.node.clone()) | ||
186 | } | ||
187 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
188 | self | ||
189 | } | ||
190 | fn is_experimental(&self) -> bool { | ||
191 | // Newly added and not very well-tested, might contain false positives. | ||
192 | true | ||
193 | } | ||
194 | } | ||
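Both new diagnostics are plain data plus a `Diagnostic` impl; the only branching logic is in `UnresolvedProcMacro::message`. A stand-alone sketch of that behaviour (the macro name `Serialize` is just an illustrative value):

    // Mirrors the `message()` logic added above, outside the trait machinery.
    fn unresolved_proc_macro_message(macro_name: &Option<String>) -> String {
        match macro_name {
            Some(name) => format!("proc macro `{}` not expanded", name),
            None => "proc macro not expanded".to_string(),
        }
    }

    fn main() {
        assert_eq!(
            unresolved_proc_macro_message(&Some("Serialize".to_string())),
            "proc macro `Serialize` not expanded"
        );
        assert_eq!(unresolved_proc_macro_message(&None), "proc macro not expanded");
    }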
diff --git a/crates/hir_def/src/import_map.rs b/crates/hir_def/src/import_map.rs index 1e24f29a8..c0f108848 100644 --- a/crates/hir_def/src/import_map.rs +++ b/crates/hir_def/src/import_map.rs | |||
@@ -7,7 +7,7 @@ use fst::{self, Streamer}; | |||
7 | use hir_expand::name::Name; | 7 | use hir_expand::name::Name; |
8 | use indexmap::{map::Entry, IndexMap}; | 8 | use indexmap::{map::Entry, IndexMap}; |
9 | use itertools::Itertools; | 9 | use itertools::Itertools; |
10 | use rustc_hash::{FxHashMap, FxHasher}; | 10 | use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; |
11 | use smallvec::SmallVec; | 11 | use smallvec::SmallVec; |
12 | use syntax::SmolStr; | 12 | use syntax::SmolStr; |
13 | 13 | ||
@@ -225,6 +225,19 @@ fn cmp((_, lhs): &(&ItemInNs, &ImportInfo), (_, rhs): &(&ItemInNs, &ImportInfo)) | |||
225 | lhs_str.cmp(&rhs_str) | 225 | lhs_str.cmp(&rhs_str) |
226 | } | 226 | } |
227 | 227 | ||
228 | #[derive(Debug, Eq, PartialEq, Hash)] | ||
229 | pub enum ImportKind { | ||
230 | Module, | ||
231 | Function, | ||
232 | Adt, | ||
233 | EnumVariant, | ||
234 | Const, | ||
235 | Static, | ||
236 | Trait, | ||
237 | TypeAlias, | ||
238 | BuiltinType, | ||
239 | } | ||
240 | |||
228 | #[derive(Debug)] | 241 | #[derive(Debug)] |
229 | pub struct Query { | 242 | pub struct Query { |
230 | query: String, | 243 | query: String, |
@@ -232,6 +245,7 @@ pub struct Query { | |||
232 | anchor_end: bool, | 245 | anchor_end: bool, |
233 | case_sensitive: bool, | 246 | case_sensitive: bool, |
234 | limit: usize, | 247 | limit: usize, |
248 | exclude_import_kinds: FxHashSet<ImportKind>, | ||
235 | } | 249 | } |
236 | 250 | ||
237 | impl Query { | 251 | impl Query { |
@@ -242,6 +256,7 @@ impl Query { | |||
242 | anchor_end: false, | 256 | anchor_end: false, |
243 | case_sensitive: false, | 257 | case_sensitive: false, |
244 | limit: usize::max_value(), | 258 | limit: usize::max_value(), |
259 | exclude_import_kinds: FxHashSet::default(), | ||
245 | } | 260 | } |
246 | } | 261 | } |
247 | 262 | ||
@@ -260,6 +275,12 @@ impl Query { | |||
260 | pub fn case_sensitive(self) -> Self { | 275 | pub fn case_sensitive(self) -> Self { |
261 | Self { case_sensitive: true, ..self } | 276 | Self { case_sensitive: true, ..self } |
262 | } | 277 | } |
278 | |||
279 | /// Do not include imports of the specified kind in the search results. | ||
280 | pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self { | ||
281 | self.exclude_import_kinds.insert(import_kind); | ||
282 | self | ||
283 | } | ||
263 | } | 284 | } |
264 | 285 | ||
265 | /// Searches dependencies of `krate` for an importable path matching `query`. | 286 | /// Searches dependencies of `krate` for an importable path matching `query`. |
@@ -303,10 +324,17 @@ pub fn search_dependencies<'a>( | |||
303 | 324 | ||
304 | // Add the items from this `ModPath` group. Those are all subsequent items in | 325 | // Add the items from this `ModPath` group. Those are all subsequent items in |
305 | // `importables` whose paths match `path`. | 326 | // `importables` whose paths match `path`. |
306 | let iter = importables.iter().copied().take_while(|item| { | 327 | let iter = importables |
307 | let item_path = &import_map.map[item].path; | 328 | .iter() |
308 | fst_path(item_path) == fst_path(path) | 329 | .copied() |
309 | }); | 330 | .take_while(|item| { |
331 | let item_path = &import_map.map[item].path; | ||
332 | fst_path(item_path) == fst_path(path) | ||
333 | }) | ||
334 | .filter(|&item| match item_import_kind(item) { | ||
335 | Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind), | ||
336 | None => true, | ||
337 | }); | ||
310 | 338 | ||
311 | if query.case_sensitive { | 339 | if query.case_sensitive { |
312 | // FIXME: This does not do a subsequence match. | 340 | // FIXME: This does not do a subsequence match. |
@@ -341,6 +369,20 @@ pub fn search_dependencies<'a>( | |||
341 | res | 369 | res |
342 | } | 370 | } |
343 | 371 | ||
372 | fn item_import_kind(item: ItemInNs) -> Option<ImportKind> { | ||
373 | Some(match item.as_module_def_id()? { | ||
374 | ModuleDefId::ModuleId(_) => ImportKind::Module, | ||
375 | ModuleDefId::FunctionId(_) => ImportKind::Function, | ||
376 | ModuleDefId::AdtId(_) => ImportKind::Adt, | ||
377 | ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant, | ||
378 | ModuleDefId::ConstId(_) => ImportKind::Const, | ||
379 | ModuleDefId::StaticId(_) => ImportKind::Static, | ||
380 | ModuleDefId::TraitId(_) => ImportKind::Trait, | ||
381 | ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias, | ||
382 | ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType, | ||
383 | }) | ||
384 | } | ||
385 | |||
344 | #[cfg(test)] | 386 | #[cfg(test)] |
345 | mod tests { | 387 | mod tests { |
346 | use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; | 388 | use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; |
@@ -758,4 +800,34 @@ mod tests { | |||
758 | "#]], | 800 | "#]], |
759 | ); | 801 | ); |
760 | } | 802 | } |
803 | |||
804 | #[test] | ||
805 | fn search_exclusions() { | ||
806 | let ra_fixture = r#" | ||
807 | //- /main.rs crate:main deps:dep | ||
808 | //- /dep.rs crate:dep | ||
809 | |||
810 | pub struct fmt; | ||
811 | pub struct FMT; | ||
812 | "#; | ||
813 | |||
814 | check_search( | ||
815 | ra_fixture, | ||
816 | "main", | ||
817 | Query::new("FMT"), | ||
818 | expect![[r#" | ||
819 | dep::fmt (t) | ||
820 | dep::fmt (v) | ||
821 | dep::FMT (t) | ||
822 | dep::FMT (v) | ||
823 | "#]], | ||
824 | ); | ||
825 | |||
826 | check_search( | ||
827 | ra_fixture, | ||
828 | "main", | ||
829 | Query::new("FMT").exclude_import_kind(ImportKind::Adt), | ||
830 | expect![[r#""#]], | ||
831 | ); | ||
832 | } | ||
761 | } | 833 | } |
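`exclude_import_kind` composes with the other builder methods on `Query`; the excluded kinds are then checked per candidate by the filter added to `search_dependencies` above. A short usage sketch inside `hir_def::import_map`:

    // Build a case-sensitive query for `FMT` that skips functions and ADTs,
    // so only modules, traits, enum variants, etc. can be returned.
    fn fmt_query_without_functions_or_adts() -> Query {
        Query::new("FMT")
            .case_sensitive()
            .exclude_import_kind(ImportKind::Function)
            .exclude_import_kind(ImportKind::Adt)
    }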
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs index eb41d324e..ffd0381d4 100644 --- a/crates/hir_def/src/nameres.rs +++ b/crates/hir_def/src/nameres.rs | |||
@@ -286,8 +286,9 @@ mod diagnostics { | |||
286 | use cfg::{CfgExpr, CfgOptions}; | 286 | use cfg::{CfgExpr, CfgOptions}; |
287 | use hir_expand::diagnostics::DiagnosticSink; | 287 | use hir_expand::diagnostics::DiagnosticSink; |
288 | use hir_expand::hygiene::Hygiene; | 288 | use hir_expand::hygiene::Hygiene; |
289 | use hir_expand::InFile; | 289 | use hir_expand::{InFile, MacroCallKind}; |
290 | use syntax::{ast, AstPtr, SyntaxNodePtr}; | 290 | use syntax::ast::AttrsOwner; |
291 | use syntax::{ast, AstNode, AstPtr, SyntaxKind, SyntaxNodePtr}; | ||
291 | 292 | ||
292 | use crate::path::ModPath; | 293 | use crate::path::ModPath; |
293 | use crate::{db::DefDatabase, diagnostics::*, nameres::LocalModuleId, AstId}; | 294 | use crate::{db::DefDatabase, diagnostics::*, nameres::LocalModuleId, AstId}; |
@@ -300,7 +301,11 @@ mod diagnostics { | |||
300 | 301 | ||
301 | UnresolvedImport { ast: AstId<ast::Use>, index: usize }, | 302 | UnresolvedImport { ast: AstId<ast::Use>, index: usize }, |
302 | 303 | ||
303 | UnconfiguredCode { ast: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, | 304 | UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions }, |
305 | |||
306 | UnresolvedProcMacro { ast: MacroCallKind }, | ||
307 | |||
308 | MacroError { ast: MacroCallKind, message: String }, | ||
304 | } | 309 | } |
305 | 310 | ||
306 | #[derive(Debug, PartialEq, Eq)] | 311 | #[derive(Debug, PartialEq, Eq)] |
@@ -341,13 +346,25 @@ mod diagnostics { | |||
341 | 346 | ||
342 | pub(super) fn unconfigured_code( | 347 | pub(super) fn unconfigured_code( |
343 | container: LocalModuleId, | 348 | container: LocalModuleId, |
344 | ast: InFile<SyntaxNodePtr>, | 349 | ast: AstId<ast::Item>, |
345 | cfg: CfgExpr, | 350 | cfg: CfgExpr, |
346 | opts: CfgOptions, | 351 | opts: CfgOptions, |
347 | ) -> Self { | 352 | ) -> Self { |
348 | Self { in_module: container, kind: DiagnosticKind::UnconfiguredCode { ast, cfg, opts } } | 353 | Self { in_module: container, kind: DiagnosticKind::UnconfiguredCode { ast, cfg, opts } } |
349 | } | 354 | } |
350 | 355 | ||
356 | pub(super) fn unresolved_proc_macro(container: LocalModuleId, ast: MacroCallKind) -> Self { | ||
357 | Self { in_module: container, kind: DiagnosticKind::UnresolvedProcMacro { ast } } | ||
358 | } | ||
359 | |||
360 | pub(super) fn macro_error( | ||
361 | container: LocalModuleId, | ||
362 | ast: MacroCallKind, | ||
363 | message: String, | ||
364 | ) -> Self { | ||
365 | Self { in_module: container, kind: DiagnosticKind::MacroError { ast, message } } | ||
366 | } | ||
367 | |||
351 | pub(super) fn add_to( | 368 | pub(super) fn add_to( |
352 | &self, | 369 | &self, |
353 | db: &dyn DefDatabase, | 370 | db: &dyn DefDatabase, |
@@ -399,13 +416,80 @@ mod diagnostics { | |||
399 | } | 416 | } |
400 | 417 | ||
401 | DiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { | 418 | DiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { |
419 | let item = ast.to_node(db.upcast()); | ||
402 | sink.push(InactiveCode { | 420 | sink.push(InactiveCode { |
403 | file: ast.file_id, | 421 | file: ast.file_id, |
404 | node: ast.value.clone(), | 422 | node: AstPtr::new(&item).into(), |
405 | cfg: cfg.clone(), | 423 | cfg: cfg.clone(), |
406 | opts: opts.clone(), | 424 | opts: opts.clone(), |
407 | }); | 425 | }); |
408 | } | 426 | } |
427 | |||
428 | DiagnosticKind::UnresolvedProcMacro { ast } => { | ||
429 | let mut precise_location = None; | ||
430 | let (file, ast, name) = match ast { | ||
431 | MacroCallKind::FnLike(ast) => { | ||
432 | let node = ast.to_node(db.upcast()); | ||
433 | (ast.file_id, SyntaxNodePtr::from(AstPtr::new(&node)), None) | ||
434 | } | ||
435 | MacroCallKind::Attr(ast, name) => { | ||
436 | let node = ast.to_node(db.upcast()); | ||
437 | |||
438 | // Compute the precise location of the macro name's token in the derive | ||
439 | // list. | ||
440 | // FIXME: This does not handle paths to the macro, but neither does the | ||
441 | // rest of r-a. | ||
442 | let derive_attrs = | ||
443 | node.attrs().filter_map(|attr| match attr.as_simple_call() { | ||
444 | Some((name, args)) if name == "derive" => Some(args), | ||
445 | _ => None, | ||
446 | }); | ||
447 | 'outer: for attr in derive_attrs { | ||
448 | let tokens = | ||
449 | attr.syntax().children_with_tokens().filter_map(|elem| { | ||
450 | match elem { | ||
451 | syntax::NodeOrToken::Node(_) => None, | ||
452 | syntax::NodeOrToken::Token(tok) => Some(tok), | ||
453 | } | ||
454 | }); | ||
455 | for token in tokens { | ||
456 | if token.kind() == SyntaxKind::IDENT | ||
457 | && token.to_string() == *name | ||
458 | { | ||
459 | precise_location = Some(token.text_range()); | ||
460 | break 'outer; | ||
461 | } | ||
462 | } | ||
463 | } | ||
464 | |||
465 | ( | ||
466 | ast.file_id, | ||
467 | SyntaxNodePtr::from(AstPtr::new(&node)), | ||
468 | Some(name.clone()), | ||
469 | ) | ||
470 | } | ||
471 | }; | ||
472 | sink.push(UnresolvedProcMacro { | ||
473 | file, | ||
474 | node: ast, | ||
475 | precise_location, | ||
476 | macro_name: name, | ||
477 | }); | ||
478 | } | ||
479 | |||
480 | DiagnosticKind::MacroError { ast, message } => { | ||
481 | let (file, ast) = match ast { | ||
482 | MacroCallKind::FnLike(ast) => { | ||
483 | let node = ast.to_node(db.upcast()); | ||
484 | (ast.file_id, SyntaxNodePtr::from(AstPtr::new(&node))) | ||
485 | } | ||
486 | MacroCallKind::Attr(ast, _) => { | ||
487 | let node = ast.to_node(db.upcast()); | ||
488 | (ast.file_id, SyntaxNodePtr::from(AstPtr::new(&node))) | ||
489 | } | ||
490 | }; | ||
491 | sink.push(MacroError { file, node: ast, message: message.clone() }); | ||
492 | } | ||
409 | } | 493 | } |
410 | } | 494 | } |
411 | } | 495 | } |
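The derive-name scan above only narrows the reported location; when no matching token is found, `precise_location` stays `None` and the diagnostic covers the whole item. A hedged sketch of that narrowing as a free function, using only the `syntax` calls that appear in the hunk (`derive_macro_name_range` itself and the `Serialize` example are hypothetical):

    use syntax::{ast::{self, AttrsOwner}, AstNode, SyntaxKind, TextRange};

    // For `#[derive(Clone, Serialize)] struct S;` with `Serialize` unresolved,
    // this returns the range of the `Serialize` token inside the derive list.
    fn derive_macro_name_range(item: &ast::Item, name: &str) -> Option<TextRange> {
        item.attrs()
            .filter_map(|attr| match attr.as_simple_call() {
                Some((attr_name, args)) if attr_name == "derive" => Some(args),
                _ => None,
            })
            .flat_map(|args| args.syntax().children_with_tokens())
            .filter_map(|element| element.into_token())
            .find(|token| token.kind() == SyntaxKind::IDENT && token.to_string() == name)
            .map(|token| token.text_range())
    }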
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 386287518..19cd713ba 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs | |||
@@ -7,7 +7,6 @@ use std::iter; | |||
7 | 7 | ||
8 | use base_db::{CrateId, FileId, ProcMacroId}; | 8 | use base_db::{CrateId, FileId, ProcMacroId}; |
9 | use cfg::{CfgExpr, CfgOptions}; | 9 | use cfg::{CfgExpr, CfgOptions}; |
10 | use hir_expand::InFile; | ||
11 | use hir_expand::{ | 10 | use hir_expand::{ |
12 | ast_id_map::FileAstId, | 11 | ast_id_map::FileAstId, |
13 | builtin_derive::find_builtin_derive, | 12 | builtin_derive::find_builtin_derive, |
@@ -16,6 +15,7 @@ use hir_expand::{ | |||
16 | proc_macro::ProcMacroExpander, | 15 | proc_macro::ProcMacroExpander, |
17 | HirFileId, MacroCallId, MacroDefId, MacroDefKind, | 16 | HirFileId, MacroCallId, MacroDefId, MacroDefKind, |
18 | }; | 17 | }; |
18 | use hir_expand::{InFile, MacroCallLoc}; | ||
19 | use rustc_hash::{FxHashMap, FxHashSet}; | 19 | use rustc_hash::{FxHashMap, FxHashSet}; |
20 | use syntax::ast; | 20 | use syntax::ast; |
21 | use test_utils::mark; | 21 | use test_utils::mark; |
@@ -812,7 +812,30 @@ impl DefCollector<'_> { | |||
812 | log::warn!("macro expansion is too deep"); | 812 | log::warn!("macro expansion is too deep"); |
813 | return; | 813 | return; |
814 | } | 814 | } |
815 | let file_id: HirFileId = macro_call_id.as_file(); | 815 | let file_id = macro_call_id.as_file(); |
816 | |||
817 | // First, fetch the raw expansion result for purposes of error reporting. This goes through | ||
818 | // `macro_expand_error` to avoid depending on the full expansion result (to improve | ||
819 | // incrementality). | ||
820 | let err = self.db.macro_expand_error(macro_call_id); | ||
821 | if let Some(err) = err { | ||
822 | if let MacroCallId::LazyMacro(id) = macro_call_id { | ||
823 | let loc: MacroCallLoc = self.db.lookup_intern_macro(id); | ||
824 | |||
825 | let diag = match err { | ||
826 | hir_expand::ExpandError::UnresolvedProcMacro => { | ||
827 | // Missing proc macros are non-fatal, so they are handled specially. | ||
828 | DefDiagnostic::unresolved_proc_macro(module_id, loc.kind) | ||
829 | } | ||
830 | _ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()), | ||
831 | }; | ||
832 | |||
833 | self.def_map.diagnostics.push(diag); | ||
834 | } | ||
835 | // FIXME: Handle eager macros. | ||
836 | } | ||
837 | |||
838 | // Then, fetch and process the item tree. This will reuse the expansion result from above. | ||
816 | let item_tree = self.db.item_tree(file_id); | 839 | let item_tree = self.db.item_tree(file_id); |
817 | let mod_dir = self.mod_dirs[&module_id].clone(); | 840 | let mod_dir = self.mod_dirs[&module_id].clone(); |
818 | ModCollector { | 841 | ModCollector { |
@@ -1336,13 +1359,11 @@ impl ModCollector<'_, '_> { | |||
1336 | 1359 | ||
1337 | fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) { | 1360 | fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) { |
1338 | let ast_id = item.ast_id(self.item_tree); | 1361 | let ast_id = item.ast_id(self.item_tree); |
1339 | let id_map = self.def_collector.db.ast_id_map(self.file_id); | ||
1340 | let syntax_ptr = id_map.get(ast_id).syntax_node_ptr(); | ||
1341 | 1362 | ||
1342 | let ast_node = InFile::new(self.file_id, syntax_ptr); | 1363 | let ast_id = InFile::new(self.file_id, ast_id); |
1343 | self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( | 1364 | self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( |
1344 | self.module_id, | 1365 | self.module_id, |
1345 | ast_node, | 1366 | ast_id, |
1346 | cfg.clone(), | 1367 | cfg.clone(), |
1347 | self.def_collector.cfg_options.clone(), | 1368 | self.def_collector.cfg_options.clone(), |
1348 | )); | 1369 | )); |
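The comment above is the incrementality-relevant part: the collector asks only for the error through the `macro_expand_error` firewall query and maps it to a def-map diagnostic, treating a missing proc macro as a weak hint rather than a hard error. A stand-alone sketch of just that mapping (the enums are minimal stand-ins; the real `hir_expand::ExpandError` has more variants):

    enum ExpandError {
        UnresolvedProcMacro,
        Other(String),
    }

    enum DefDiagnosticKind {
        UnresolvedProcMacro,
        MacroError { message: String },
    }

    // Missing proc-macro support is non-fatal, so it gets its own diagnostic;
    // every other expansion failure becomes a hard `macro-error`.
    fn diagnostic_for(err: ExpandError) -> DefDiagnosticKind {
        match err {
            ExpandError::UnresolvedProcMacro => DefDiagnosticKind::UnresolvedProcMacro,
            ExpandError::Other(message) => DefDiagnosticKind::MacroError { message },
        }
    }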
diff --git a/crates/hir_def/src/nameres/tests/incremental.rs b/crates/hir_def/src/nameres/tests/incremental.rs index cfbc62cc4..8981fa7c9 100644 --- a/crates/hir_def/src/nameres/tests/incremental.rs +++ b/crates/hir_def/src/nameres/tests/incremental.rs | |||
@@ -38,6 +38,9 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { | |||
38 | fn foo() -> i32 { | 38 | fn foo() -> i32 { |
39 | 1 + 1 | 39 | 1 + 1 |
40 | } | 40 | } |
41 | |||
42 | #[cfg(never)] | ||
43 | fn no() {} | ||
41 | //- /foo/mod.rs | 44 | //- /foo/mod.rs |
42 | pub mod bar; | 45 | pub mod bar; |
43 | 46 | ||
@@ -53,6 +56,9 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { | |||
53 | use E::*; | 56 | use E::*; |
54 | 57 | ||
55 | fn foo() -> i32 { 92 } | 58 | fn foo() -> i32 { 92 } |
59 | |||
60 | #[cfg(never)] | ||
61 | fn no() {} | ||
56 | ", | 62 | ", |
57 | ); | 63 | ); |
58 | } | 64 | } |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index ade57ac1b..ff50bfd82 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use base_db::{salsa, SourceDatabase}; | 5 | use base_db::{salsa, SourceDatabase}; |
6 | use mbe::{ExpandResult, MacroRules}; | 6 | use mbe::{ExpandError, ExpandResult, MacroRules}; |
7 | use parser::FragmentKind; | 7 | use parser::FragmentKind; |
8 | use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; | 8 | use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; |
9 | 9 | ||
@@ -75,9 +75,14 @@ pub trait AstDatabase: SourceDatabase { | |||
75 | #[salsa::transparent] | 75 | #[salsa::transparent] |
76 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; | 76 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; |
77 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; | 77 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; |
78 | fn parse_macro(&self, macro_file: MacroFile) | 78 | fn parse_macro_expansion( |
79 | -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>; | 79 | &self, |
80 | fn macro_expand(&self, macro_call: MacroCallId) -> (Option<Arc<tt::Subtree>>, Option<String>); | 80 | macro_file: MacroFile, |
81 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; | ||
82 | fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>; | ||
83 | |||
84 | /// Firewall query that returns the error from the `macro_expand` query. | ||
85 | fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>; | ||
81 | 86 | ||
82 | #[salsa::interned] | 87 | #[salsa::interned] |
83 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; | 88 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; |
@@ -102,23 +107,20 @@ pub fn expand_hypothetical( | |||
102 | let token_id = tmap_1.token_by_range(range)?; | 107 | let token_id = tmap_1.token_by_range(range)?; |
103 | let macro_def = expander(db, actual_macro_call)?; | 108 | let macro_def = expander(db, actual_macro_call)?; |
104 | let (node, tmap_2) = | 109 | let (node, tmap_2) = |
105 | parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?; | 110 | parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1)))).value?; |
106 | let token_id = macro_def.0.map_id_down(token_id); | 111 | let token_id = macro_def.0.map_id_down(token_id); |
107 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; | 112 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; |
108 | let token = syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?; | 113 | let token = syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?; |
109 | Some((node.syntax_node(), token)) | 114 | Some((node.syntax_node(), token)) |
110 | } | 115 | } |
111 | 116 | ||
112 | pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 117 | fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
113 | let map = | 118 | let map = |
114 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); | 119 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); |
115 | Arc::new(map) | 120 | Arc::new(map) |
116 | } | 121 | } |
117 | 122 | ||
118 | pub(crate) fn macro_def( | 123 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { |
119 | db: &dyn AstDatabase, | ||
120 | id: MacroDefId, | ||
121 | ) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | ||
122 | match id.kind { | 124 | match id.kind { |
123 | MacroDefKind::Declarative => { | 125 | MacroDefKind::Declarative => { |
124 | let macro_call = id.ast_id?.to_node(db); | 126 | let macro_call = id.ast_id?.to_node(db); |
@@ -149,7 +151,7 @@ pub(crate) fn macro_def( | |||
149 | } | 151 | } |
150 | } | 152 | } |
151 | 153 | ||
152 | pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | 154 | fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { |
153 | let id = match id { | 155 | let id = match id { |
154 | MacroCallId::LazyMacro(id) => id, | 156 | MacroCallId::LazyMacro(id) => id, |
155 | MacroCallId::EagerMacro(_id) => { | 157 | MacroCallId::EagerMacro(_id) => { |
@@ -162,22 +164,20 @@ pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<Gr | |||
162 | Some(arg.green().clone()) | 164 | Some(arg.green().clone()) |
163 | } | 165 | } |
164 | 166 | ||
165 | pub(crate) fn macro_arg( | 167 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { |
166 | db: &dyn AstDatabase, | ||
167 | id: MacroCallId, | ||
168 | ) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
169 | let arg = db.macro_arg_text(id)?; | 168 | let arg = db.macro_arg_text(id)?; |
170 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; | 169 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; |
171 | Some(Arc::new((tt, tmap))) | 170 | Some(Arc::new((tt, tmap))) |
172 | } | 171 | } |
173 | 172 | ||
174 | pub(crate) fn macro_expand( | 173 | fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { |
175 | db: &dyn AstDatabase, | ||
176 | id: MacroCallId, | ||
177 | ) -> (Option<Arc<tt::Subtree>>, Option<String>) { | ||
178 | macro_expand_with_arg(db, id, None) | 174 | macro_expand_with_arg(db, id, None) |
179 | } | 175 | } |
180 | 176 | ||
177 | fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> { | ||
178 | db.macro_expand(macro_call).err | ||
179 | } | ||
180 | |||
181 | fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | 181 | fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { |
182 | let lazy_id = match id { | 182 | let lazy_id = match id { |
183 | MacroCallId::LazyMacro(id) => id, | 183 | MacroCallId::LazyMacro(id) => id, |
@@ -195,17 +195,20 @@ fn macro_expand_with_arg( | |||
195 | db: &dyn AstDatabase, | 195 | db: &dyn AstDatabase, |
196 | id: MacroCallId, | 196 | id: MacroCallId, |
197 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | 197 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, |
198 | ) -> (Option<Arc<tt::Subtree>>, Option<String>) { | 198 | ) -> ExpandResult<Option<Arc<tt::Subtree>>> { |
199 | let _p = profile::span("macro_expand"); | ||
199 | let lazy_id = match id { | 200 | let lazy_id = match id { |
200 | MacroCallId::LazyMacro(id) => id, | 201 | MacroCallId::LazyMacro(id) => id, |
201 | MacroCallId::EagerMacro(id) => { | 202 | MacroCallId::EagerMacro(id) => { |
202 | if arg.is_some() { | 203 | if arg.is_some() { |
203 | return ( | 204 | return ExpandResult::str_err( |
204 | None, | 205 | "hypothetical macro expansion not implemented for eager macro".to_owned(), |
205 | Some("hypothetical macro expansion not implemented for eager macro".to_owned()), | ||
206 | ); | 206 | ); |
207 | } else { | 207 | } else { |
208 | return (Some(db.lookup_intern_eager_expansion(id).subtree), None); | 208 | return ExpandResult { |
209 | value: Some(db.lookup_intern_eager_expansion(id).subtree), | ||
210 | err: None, | ||
211 | }; | ||
209 | } | 212 | } |
210 | } | 213 | } |
211 | }; | 214 | }; |
@@ -213,23 +216,27 @@ fn macro_expand_with_arg( | |||
213 | let loc = db.lookup_intern_macro(lazy_id); | 216 | let loc = db.lookup_intern_macro(lazy_id); |
214 | let macro_arg = match arg.or_else(|| db.macro_arg(id)) { | 217 | let macro_arg = match arg.or_else(|| db.macro_arg(id)) { |
215 | Some(it) => it, | 218 | Some(it) => it, |
216 | None => return (None, Some("Fail to args in to tt::TokenTree".into())), | 219 | None => return ExpandResult::str_err("Fail to args in to tt::TokenTree".into()), |
217 | }; | 220 | }; |
218 | 221 | ||
219 | let macro_rules = match db.macro_def(loc.def) { | 222 | let macro_rules = match db.macro_def(loc.def) { |
220 | Some(it) => it, | 223 | Some(it) => it, |
221 | None => return (None, Some("Fail to find macro definition".into())), | 224 | None => return ExpandResult::str_err("Fail to find macro definition".into()), |
222 | }; | 225 | }; |
223 | let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, &macro_arg.0); | 226 | let ExpandResult { value: tt, err } = macro_rules.0.expand(db, lazy_id, &macro_arg.0); |
224 | // Set a hard limit for the expanded tt | 227 | // Set a hard limit for the expanded tt |
225 | let count = tt.count(); | 228 | let count = tt.count(); |
226 | if count > 262144 { | 229 | if count > 262144 { |
227 | return (None, Some(format!("Total tokens count exceed limit : count = {}", count))); | 230 | return ExpandResult::str_err(format!( |
231 | "Total tokens count exceed limit : count = {}", | ||
232 | count | ||
233 | )); | ||
228 | } | 234 | } |
229 | (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e))) | 235 | |
236 | ExpandResult { value: Some(Arc::new(tt)), err } | ||
230 | } | 237 | } |
231 | 238 | ||
232 | pub(crate) fn expand_proc_macro( | 239 | fn expand_proc_macro( |
233 | db: &dyn AstDatabase, | 240 | db: &dyn AstDatabase, |
234 | id: MacroCallId, | 241 | id: MacroCallId, |
235 | ) -> Result<tt::Subtree, mbe::ExpandError> { | 242 | ) -> Result<tt::Subtree, mbe::ExpandError> { |
@@ -256,36 +263,37 @@ pub(crate) fn expand_proc_macro( | |||
256 | expander.expand(db, lazy_id, &macro_arg.0) | 263 | expander.expand(db, lazy_id, &macro_arg.0) |
257 | } | 264 | } |
258 | 265 | ||
259 | pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { | 266 | fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { |
260 | match file_id.0 { | 267 | match file_id.0 { |
261 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | 268 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), |
262 | HirFileIdRepr::MacroFile(macro_file) => { | 269 | HirFileIdRepr::MacroFile(macro_file) => { |
263 | db.parse_macro(macro_file).map(|(it, _)| it.syntax_node()) | 270 | db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node()) |
264 | } | 271 | } |
265 | } | 272 | } |
266 | } | 273 | } |
267 | 274 | ||
268 | pub(crate) fn parse_macro( | 275 | fn parse_macro_expansion( |
269 | db: &dyn AstDatabase, | 276 | db: &dyn AstDatabase, |
270 | macro_file: MacroFile, | 277 | macro_file: MacroFile, |
271 | ) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { | 278 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { |
272 | parse_macro_with_arg(db, macro_file, None) | 279 | parse_macro_with_arg(db, macro_file, None) |
273 | } | 280 | } |
274 | 281 | ||
275 | pub fn parse_macro_with_arg( | 282 | fn parse_macro_with_arg( |
276 | db: &dyn AstDatabase, | 283 | db: &dyn AstDatabase, |
277 | macro_file: MacroFile, | 284 | macro_file: MacroFile, |
278 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | 285 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, |
279 | ) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { | 286 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { |
280 | let _p = profile::span("parse_macro_query"); | ||
281 | |||
282 | let macro_call_id = macro_file.macro_call_id; | 287 | let macro_call_id = macro_file.macro_call_id; |
283 | let (tt, err) = if let Some(arg) = arg { | 288 | let result = if let Some(arg) = arg { |
284 | macro_expand_with_arg(db, macro_call_id, Some(arg)) | 289 | macro_expand_with_arg(db, macro_call_id, Some(arg)) |
285 | } else { | 290 | } else { |
286 | db.macro_expand(macro_call_id) | 291 | db.macro_expand(macro_call_id) |
287 | }; | 292 | }; |
288 | if let Some(err) = &err { | 293 | |
294 | let _p = profile::span("parse_macro_expansion"); | ||
295 | |||
296 | if let Some(err) = &result.err { | ||
289 | // Note: | 297 | // Note: |
290 | // The final goal is for every parse_macro call to succeed, | 298 | // The final goal is for every parse_macro call to succeed, |
291 | // so that the following logging is never reached. | 299 | // so that the following logging is never reached. |
@@ -303,40 +311,50 @@ pub fn parse_macro_with_arg( | |||
303 | .join("\n"); | 311 | .join("\n"); |
304 | 312 | ||
305 | log::warn!( | 313 | log::warn!( |
306 | "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}", | 314 | "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}", |
307 | err, | 315 | err, |
308 | node.value, | 316 | node.value, |
309 | parents | 317 | parents |
310 | ); | 318 | ); |
311 | } | 319 | } |
312 | _ => { | 320 | _ => { |
313 | log::warn!("fail on macro_parse: (reason: {})", err); | 321 | log::warn!("fail on macro_parse: (reason: {:?})", err); |
314 | } | 322 | } |
315 | } | 323 | } |
324 | } | ||
325 | let tt = match result.value { | ||
326 | Some(tt) => tt, | ||
327 | None => return ExpandResult { value: None, err: result.err }, | ||
316 | }; | 328 | }; |
317 | let tt = tt?; | ||
318 | 329 | ||
319 | let fragment_kind = to_fragment_kind(db, macro_call_id); | 330 | let fragment_kind = to_fragment_kind(db, macro_call_id); |
320 | 331 | ||
321 | let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?; | 332 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { |
333 | Ok(it) => it, | ||
334 | Err(err) => { | ||
335 | return ExpandResult::only_err(err); | ||
336 | } | ||
337 | }; | ||
322 | 338 | ||
323 | if err.is_none() { | 339 | match result.err { |
324 | Some((parse, Arc::new(rev_token_map))) | 340 | Some(err) => { |
325 | } else { | 341 | // Safety check for recursive identity macro. |
326 | // FIXME: | 342 | let node = parse.syntax_node(); |
327 | // In future, we should propagate the actual error with recovery information | 343 | let file: HirFileId = macro_file.into(); |
328 | // instead of ignore the error here. | 344 | let call_node = match file.call_node(db) { |
329 | 345 | Some(it) => it, | |
330 | // Safe check for recurisve identity macro | 346 | None => { |
331 | let node = parse.syntax_node(); | 347 | return ExpandResult::only_err(err); |
332 | let file: HirFileId = macro_file.into(); | 348 | } |
333 | let call_node = file.call_node(db)?; | 349 | }; |
334 | 350 | ||
335 | if !diff(&node, &call_node.value).is_empty() { | 351 | if !diff(&node, &call_node.value).is_empty() { |
336 | Some((parse, Arc::new(rev_token_map))) | 352 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) } |
337 | } else { | 353 | } else { |
338 | None | 354 | return ExpandResult::only_err(err); |
355 | } | ||
339 | } | 356 | } |
357 | None => ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }, | ||
340 | } | 358 | } |
341 | } | 359 | } |
342 | 360 | ||
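The expansion queries in this file now return `mbe::ExpandResult`, pairing whatever value could be produced with an optional error instead of the old `(Option<T>, Option<String>)` tuples. A hedged sketch of the shape these call sites assume; the real definition in `mbe` may differ in derives, error variants, and constructor bodies:

    // Assumed shape of `ExpandResult`, based on the `value`/`err` field accesses
    // and the `str_err`/`only_err` constructors used above.
    #[derive(Debug, Clone)]
    pub enum ExpandError {
        UnresolvedProcMacro,
        Other(String),
        // further variants elided
    }

    #[derive(Debug, Clone)]
    pub struct ExpandResult<T> {
        pub value: T,
        pub err: Option<ExpandError>,
    }

    impl<T: Default> ExpandResult<T> {
        // "No useful value, free-form error message."
        pub fn str_err(err: String) -> Self {
            Self::only_err(ExpandError::Other(err))
        }

        // "No useful value, structured error."
        pub fn only_err(err: ExpandError) -> Self {
            ExpandResult { value: T::default(), err: Some(err) }
        }
    }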
diff --git a/crates/hir_expand/src/diagnostics.rs b/crates/hir_expand/src/diagnostics.rs index 1043c6aeb..bf0b85ce9 100644 --- a/crates/hir_expand/src/diagnostics.rs +++ b/crates/hir_expand/src/diagnostics.rs | |||
@@ -5,7 +5,7 @@ | |||
5 | //! | 5 | //! |
6 | //! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating | 6 | //! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating |
7 | //! a `DiagnosticSink`, you supply a callback which can react to a `dyn | 7 | //! a `DiagnosticSink`, you supply a callback which can react to a `dyn |
8 | //! Diagnostic` or to any concrete diagnostic (downcasting is sued internally). | 8 | //! Diagnostic` or to any concrete diagnostic (downcasting is used internally). |
9 | //! | 9 | //! |
10 | //! Because diagnostics store file offsets, it's a bad idea to store them | 10 | //! Because diagnostics store file offsets, it's a bad idea to store them |
11 | //! directly in salsa. For this reason, every hir subsystem defines its own | 11 | //! directly in salsa. For this reason, every hir subsystem defines its own |
@@ -32,7 +32,12 @@ impl DiagnosticCode { | |||
32 | pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { | 32 | pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { |
33 | fn code(&self) -> DiagnosticCode; | 33 | fn code(&self) -> DiagnosticCode; |
34 | fn message(&self) -> String; | 34 | fn message(&self) -> String; |
35 | /// Used in highlighting and related purposes | 35 | /// Source element that triggered the diagnostics. |
36 | /// | ||
37 | /// Note that this should reflect "semantics", rather than specific span we | ||
38 | /// want to highlight. When rendering the diagnostics into an error message, | ||
39 | /// the IDE will fetch the `SyntaxNode` and will narrow the span | ||
40 | /// appropriately. | ||
36 | fn display_source(&self) -> InFile<SyntaxNodePtr>; | 41 | fn display_source(&self) -> InFile<SyntaxNodePtr>; |
37 | fn as_any(&self) -> &(dyn Any + Send + 'static); | 42 | fn as_any(&self) -> &(dyn Any + Send + 'static); |
38 | fn is_experimental(&self) -> bool { | 43 | fn is_experimental(&self) -> bool { |
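`display_source` now deliberately returns the whole triggering node; consumers are expected to narrow it themselves, for example with a `precise_location` like the one `UnresolvedProcMacro` carries. A small consumer-side sketch of that convention, where the caller has already resolved the node pointer to a range:

    use syntax::TextRange;

    // Prefer the precise range recorded on a diagnostic (when it has one),
    // falling back to the range of the node from `display_source`.
    fn highlight_range(precise: Option<TextRange>, node_range: TextRange) -> TextRange {
        precise.unwrap_or(node_range)
    }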
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 17f1178ed..6dad2507b 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -15,6 +15,8 @@ pub mod proc_macro; | |||
15 | pub mod quote; | 15 | pub mod quote; |
16 | pub mod eager; | 16 | pub mod eager; |
17 | 17 | ||
18 | pub use mbe::{ExpandError, ExpandResult}; | ||
19 | |||
18 | use std::hash::Hash; | 20 | use std::hash::Hash; |
19 | use std::sync::Arc; | 21 | use std::sync::Arc; |
20 | 22 | ||
@@ -144,7 +146,7 @@ impl HirFileId { | |||
144 | let def_tt = loc.def.ast_id?.to_node(db).token_tree()?; | 146 | let def_tt = loc.def.ast_id?.to_node(db).token_tree()?; |
145 | 147 | ||
146 | let macro_def = db.macro_def(loc.def)?; | 148 | let macro_def = db.macro_def(loc.def)?; |
147 | let (parse, exp_map) = db.parse_macro(macro_file)?; | 149 | let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; |
148 | let macro_arg = db.macro_arg(macro_file.macro_call_id)?; | 150 | let macro_arg = db.macro_arg(macro_file.macro_call_id)?; |
149 | 151 | ||
150 | Some(ExpansionInfo { | 152 | Some(ExpansionInfo { |
@@ -253,7 +255,7 @@ pub enum MacroDefKind { | |||
253 | pub struct MacroCallLoc { | 255 | pub struct MacroCallLoc { |
254 | pub(crate) def: MacroDefId, | 256 | pub(crate) def: MacroDefId, |
255 | pub(crate) krate: CrateId, | 257 | pub(crate) krate: CrateId, |
256 | pub(crate) kind: MacroCallKind, | 258 | pub kind: MacroCallKind, |
257 | } | 259 | } |
258 | 260 | ||
259 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 261 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs index 7505cb061..97edf0fb6 100644 --- a/crates/hir_expand/src/proc_macro.rs +++ b/crates/hir_expand/src/proc_macro.rs | |||
@@ -50,7 +50,7 @@ impl ProcMacroExpander { | |||
50 | 50 | ||
51 | proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from) | 51 | proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from) |
52 | } | 52 | } |
53 | None => Err(err!("Unresolved proc macro")), | 53 | None => Err(mbe::ExpandError::UnresolvedProcMacro), |
54 | } | 54 | } |
55 | } | 55 | } |
56 | } | 56 | } |
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs index 5bd03f2ac..62c329731 100644 --- a/crates/hir_ty/src/diagnostics/match_check.rs +++ b/crates/hir_ty/src/diagnostics/match_check.rs | |||
@@ -216,14 +216,14 @@ | |||
216 | //! U(P, p) := U(P, (r_1, p_2, .., p_n)) | 216 | //! U(P, p) := U(P, (r_1, p_2, .., p_n)) |
217 | //! || U(P, (r_2, p_2, .., p_n)) | 217 | //! || U(P, (r_2, p_2, .., p_n)) |
218 | //! ``` | 218 | //! ``` |
219 | use std::sync::Arc; | 219 | use std::{iter, sync::Arc}; |
220 | 220 | ||
221 | use arena::Idx; | 221 | use arena::Idx; |
222 | use hir_def::{ | 222 | use hir_def::{ |
223 | adt::VariantData, | 223 | adt::VariantData, |
224 | body::Body, | 224 | body::Body, |
225 | expr::{Expr, Literal, Pat, PatId}, | 225 | expr::{Expr, Literal, Pat, PatId}, |
226 | AdtId, EnumVariantId, VariantId, | 226 | AdtId, EnumVariantId, StructId, VariantId, |
227 | }; | 227 | }; |
228 | use smallvec::{smallvec, SmallVec}; | 228 | use smallvec::{smallvec, SmallVec}; |
229 | 229 | ||
@@ -366,16 +366,17 @@ impl PatStack { | |||
366 | 366 | ||
367 | let head_pat = head.as_pat(cx); | 367 | let head_pat = head.as_pat(cx); |
368 | let result = match (head_pat, constructor) { | 368 | let result = match (head_pat, constructor) { |
369 | (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { | 369 | (Pat::Tuple { args: pat_ids, ellipsis }, &Constructor::Tuple { arity }) => { |
370 | if ellipsis.is_some() { | 370 | if let Some(ellipsis) = ellipsis { |
371 | // If there are ellipsis here, we should add the correct number of | 371 | let (pre, post) = pat_ids.split_at(ellipsis); |
372 | // Pat::Wild patterns to `pat_ids`. We should be able to use the | 372 | let n_wild_pats = arity.saturating_sub(pat_ids.len()); |
373 | // constructors arity for this, but at the time of writing we aren't | 373 | let pre_iter = pre.iter().map(Into::into); |
374 | // correctly calculating this arity when ellipsis are present. | 374 | let wildcards = iter::repeat(PatIdOrWild::Wild).take(n_wild_pats); |
375 | return Err(MatchCheckErr::NotImplemented); | 375 | let post_iter = post.iter().map(Into::into); |
376 | Some(self.replace_head_with(pre_iter.chain(wildcards).chain(post_iter))) | ||
377 | } else { | ||
378 | Some(self.replace_head_with(pat_ids.iter())) | ||
376 | } | 379 | } |
377 | |||
378 | Some(self.replace_head_with(pat_ids.iter())) | ||
379 | } | 380 | } |
380 | (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { | 381 | (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => { |
381 | match cx.body.exprs[lit_expr] { | 382 | match cx.body.exprs[lit_expr] { |
@@ -390,21 +391,28 @@ impl PatStack { | |||
390 | } | 391 | } |
391 | } | 392 | } |
392 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), | 393 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), |
393 | (Pat::Path(_), Constructor::Enum(constructor)) => { | 394 | (Pat::Path(_), constructor) => { |
394 | // unit enum variants become `Pat::Path` | 395 | // unit enum variants become `Pat::Path` |
395 | let pat_id = head.as_id().expect("we know this isn't a wild"); | 396 | let pat_id = head.as_id().expect("we know this isn't a wild"); |
396 | if !enum_variant_matches(cx, pat_id, *constructor) { | 397 | let variant_id: VariantId = match constructor { |
398 | &Constructor::Enum(e) => e.into(), | ||
399 | &Constructor::Struct(s) => s.into(), | ||
400 | _ => return Err(MatchCheckErr::NotImplemented), | ||
401 | }; | ||
402 | if Some(variant_id) != cx.infer.variant_resolution_for_pat(pat_id) { | ||
397 | None | 403 | None |
398 | } else { | 404 | } else { |
399 | Some(self.to_tail()) | 405 | Some(self.to_tail()) |
400 | } | 406 | } |
401 | } | 407 | } |
402 | ( | 408 | (Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, constructor) => { |
403 | Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, | ||
404 | Constructor::Enum(enum_constructor), | ||
405 | ) => { | ||
406 | let pat_id = head.as_id().expect("we know this isn't a wild"); | 409 | let pat_id = head.as_id().expect("we know this isn't a wild"); |
407 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { | 410 | let variant_id: VariantId = match constructor { |
411 | &Constructor::Enum(e) => e.into(), | ||
412 | &Constructor::Struct(s) => s.into(), | ||
413 | _ => return Err(MatchCheckErr::MalformedMatchArm), | ||
414 | }; | ||
415 | if Some(variant_id) != cx.infer.variant_resolution_for_pat(pat_id) { | ||
408 | None | 416 | None |
409 | } else { | 417 | } else { |
410 | let constructor_arity = constructor.arity(cx)?; | 418 | let constructor_arity = constructor.arity(cx)?; |
@@ -442,12 +450,22 @@ impl PatStack { | |||
442 | } | 450 | } |
443 | } | 451 | } |
444 | } | 452 | } |
445 | (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { | 453 | (Pat::Record { args: ref arg_patterns, .. }, constructor) => { |
446 | let pat_id = head.as_id().expect("we know this isn't a wild"); | 454 | let pat_id = head.as_id().expect("we know this isn't a wild"); |
447 | if !enum_variant_matches(cx, pat_id, *e) { | 455 | let (variant_id, variant_data) = match constructor { |
456 | &Constructor::Enum(e) => ( | ||
457 | e.into(), | ||
458 | cx.db.enum_data(e.parent).variants[e.local_id].variant_data.clone(), | ||
459 | ), | ||
460 | &Constructor::Struct(s) => { | ||
461 | (s.into(), cx.db.struct_data(s).variant_data.clone()) | ||
462 | } | ||
463 | _ => return Err(MatchCheckErr::MalformedMatchArm), | ||
464 | }; | ||
465 | if Some(variant_id) != cx.infer.variant_resolution_for_pat(pat_id) { | ||
448 | None | 466 | None |
449 | } else { | 467 | } else { |
450 | match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() { | 468 | match variant_data.as_ref() { |
451 | VariantData::Record(struct_field_arena) => { | 469 | VariantData::Record(struct_field_arena) => { |
452 | // Here we treat any missing fields in the record as the wild pattern, as | 470 | // Here we treat any missing fields in the record as the wild pattern, as |
453 | // if the record has ellipsis. We want to do this here even if the | 471 | // if the record has ellipsis. We want to do this here even if the |
@@ -726,6 +744,7 @@ enum Constructor { | |||
726 | Bool(bool), | 744 | Bool(bool), |
727 | Tuple { arity: usize }, | 745 | Tuple { arity: usize }, |
728 | Enum(EnumVariantId), | 746 | Enum(EnumVariantId), |
747 | Struct(StructId), | ||
729 | } | 748 | } |
730 | 749 | ||
731 | impl Constructor { | 750 | impl Constructor { |
@@ -740,6 +759,11 @@ impl Constructor { | |||
740 | VariantData::Unit => 0, | 759 | VariantData::Unit => 0, |
741 | } | 760 | } |
742 | } | 761 | } |
762 | &Constructor::Struct(s) => match cx.db.struct_data(s).variant_data.as_ref() { | ||
763 | VariantData::Tuple(struct_field_data) => struct_field_data.len(), | ||
764 | VariantData::Record(struct_field_data) => struct_field_data.len(), | ||
765 | VariantData::Unit => 0, | ||
766 | }, | ||
743 | }; | 767 | }; |
744 | 768 | ||
745 | Ok(arity) | 769 | Ok(arity) |
@@ -748,7 +772,7 @@ impl Constructor { | |||
748 | fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec<Constructor> { | 772 | fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec<Constructor> { |
749 | match self { | 773 | match self { |
750 | Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)], | 774 | Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)], |
751 | Constructor::Tuple { .. } => vec![*self], | 775 | Constructor::Tuple { .. } | Constructor::Struct(_) => vec![*self], |
752 | Constructor::Enum(e) => cx | 776 | Constructor::Enum(e) => cx |
753 | .db | 777 | .db |
754 | .enum_data(e.parent) | 778 | .enum_data(e.parent) |
@@ -767,10 +791,11 @@ impl Constructor { | |||
767 | fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> { | 791 | fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> { |
768 | let res = match pat.as_pat(cx) { | 792 | let res = match pat.as_pat(cx) { |
769 | Pat::Wild => None, | 793 | Pat::Wild => None, |
770 | // FIXME somehow create the Tuple constructor with the proper arity. If there are | 794 | Pat::Tuple { .. } => { |
771 | // ellipsis, the arity is not equal to the number of patterns. | 795 | let pat_id = pat.as_id().expect("we already know this pattern is not a wild"); |
772 | Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => { | 796 | Some(Constructor::Tuple { |
773 | Some(Constructor::Tuple { arity: pats.len() }) | 797 | arity: cx.infer.type_of_pat[pat_id].as_tuple().ok_or(MatchCheckErr::Unknown)?.len(), |
798 | }) | ||
774 | } | 799 | } |
775 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { | 800 | Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] { |
776 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), | 801 | Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)), |
@@ -784,6 +809,7 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt | |||
784 | VariantId::EnumVariantId(enum_variant_id) => { | 809 | VariantId::EnumVariantId(enum_variant_id) => { |
785 | Some(Constructor::Enum(enum_variant_id)) | 810 | Some(Constructor::Enum(enum_variant_id)) |
786 | } | 811 | } |
812 | VariantId::StructId(struct_id) => Some(Constructor::Struct(struct_id)), | ||
787 | _ => return Err(MatchCheckErr::NotImplemented), | 813 | _ => return Err(MatchCheckErr::NotImplemented), |
788 | } | 814 | } |
789 | } | 815 | } |
@@ -828,13 +854,13 @@ fn all_constructors_covered( | |||
828 | 854 | ||
829 | false | 855 | false |
830 | }), | 856 | }), |
857 | &Constructor::Struct(s) => used_constructors.iter().any(|constructor| match constructor { | ||
858 | &Constructor::Struct(sid) => sid == s, | ||
859 | _ => false, | ||
860 | }), | ||
831 | } | 861 | } |
832 | } | 862 | } |
833 | 863 | ||
834 | fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool { | ||
835 | Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id) | ||
836 | } | ||
837 | |||
838 | #[cfg(test)] | 864 | #[cfg(test)] |
839 | mod tests { | 865 | mod tests { |
840 | use crate::diagnostics::tests::check_diagnostics; | 866 | use crate::diagnostics::tests::check_diagnostics; |
@@ -846,8 +872,8 @@ mod tests { | |||
846 | fn main() { | 872 | fn main() { |
847 | match () { } | 873 | match () { } |
848 | //^^ Missing match arm | 874 | //^^ Missing match arm |
849 | match (()) { } | 875 | match (()) { } |
850 | //^^^^ Missing match arm | 876 | //^^^^ Missing match arm |
851 | 877 | ||
852 | match () { _ => (), } | 878 | match () { _ => (), } |
853 | match () { () => (), } | 879 | match () { () => (), } |
@@ -1352,6 +1378,123 @@ fn main() { | |||
1352 | ); | 1378 | ); |
1353 | } | 1379 | } |
1354 | 1380 | ||
1381 | #[test] | ||
1382 | fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { | ||
1383 | check_diagnostics( | ||
1384 | r#" | ||
1385 | fn main() { | ||
1386 | match (false, true, false) { | ||
1387 | //^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
1388 | (false, ..) => (), | ||
1389 | } | ||
1390 | }"#, | ||
1391 | ); | ||
1392 | } | ||
1393 | |||
1394 | #[test] | ||
1395 | fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { | ||
1396 | check_diagnostics( | ||
1397 | r#" | ||
1398 | fn main() { | ||
1399 | match (false, true, false) { | ||
1400 | //^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
1401 | (.., false) => (), | ||
1402 | } | ||
1403 | }"#, | ||
1404 | ); | ||
1405 | } | ||
1406 | |||
1407 | #[test] | ||
1408 | fn tuple_of_bools_with_ellipsis_in_middle_missing_arm() { | ||
1409 | check_diagnostics( | ||
1410 | r#" | ||
1411 | fn main() { | ||
1412 | match (false, true, false) { | ||
1413 | //^^^^^^^^^^^^^^^^^^^^ Missing match arm | ||
1414 | (true, .., false) => (), | ||
1415 | } | ||
1416 | }"#, | ||
1417 | ); | ||
1418 | } | ||
1419 | |||
1420 | #[test] | ||
1421 | fn record_struct() { | ||
1422 | check_diagnostics( | ||
1423 | r#"struct Foo { a: bool } | ||
1424 | fn main(f: Foo) { | ||
1425 | match f {} | ||
1426 | //^ Missing match arm | ||
1427 | match f { Foo { a: true } => () } | ||
1428 | //^ Missing match arm | ||
1429 | match &f { Foo { a: true } => () } | ||
1430 | //^^ Missing match arm | ||
1431 | match f { Foo { a: _ } => () } | ||
1432 | match f { | ||
1433 | Foo { a: true } => (), | ||
1434 | Foo { a: false } => (), | ||
1435 | } | ||
1436 | match &f { | ||
1437 | Foo { a: true } => (), | ||
1438 | Foo { a: false } => (), | ||
1439 | } | ||
1440 | } | ||
1441 | "#, | ||
1442 | ); | ||
1443 | } | ||
1444 | |||
1445 | #[test] | ||
1446 | fn tuple_struct() { | ||
1447 | check_diagnostics( | ||
1448 | r#"struct Foo(bool); | ||
1449 | fn main(f: Foo) { | ||
1450 | match f {} | ||
1451 | //^ Missing match arm | ||
1452 | match f { Foo(true) => () } | ||
1453 | //^ Missing match arm | ||
1454 | match f { | ||
1455 | Foo(true) => (), | ||
1456 | Foo(false) => (), | ||
1457 | } | ||
1458 | } | ||
1459 | "#, | ||
1460 | ); | ||
1461 | } | ||
1462 | |||
1463 | #[test] | ||
1464 | fn unit_struct() { | ||
1465 | check_diagnostics( | ||
1466 | r#"struct Foo; | ||
1467 | fn main(f: Foo) { | ||
1468 | match f {} | ||
1469 | //^ Missing match arm | ||
1470 | match f { Foo => () } | ||
1471 | } | ||
1472 | "#, | ||
1473 | ); | ||
1474 | } | ||
1475 | |||
1476 | #[test] | ||
1477 | fn record_struct_ellipsis() { | ||
1478 | check_diagnostics( | ||
1479 | r#"struct Foo { foo: bool, bar: bool } | ||
1480 | fn main(f: Foo) { | ||
1481 | match f { Foo { foo: true, .. } => () } | ||
1482 | //^ Missing match arm | ||
1483 | match f { | ||
1484 | //^ Missing match arm | ||
1485 | Foo { foo: true, .. } => (), | ||
1486 | Foo { bar: false, .. } => () | ||
1487 | } | ||
1488 | match f { Foo { .. } => () } | ||
1489 | match f { | ||
1490 | Foo { foo: true, .. } => (), | ||
1491 | Foo { foo: false, .. } => () | ||
1492 | } | ||
1493 | } | ||
1494 | "#, | ||
1495 | ); | ||
1496 | } | ||
1497 | |||
1355 | mod false_negatives { | 1498 | mod false_negatives { |
1356 | //! The implementation of match checking here is a work in progress. As we roll this out, we | 1499 | //! The implementation of match checking here is a work in progress. As we roll this out, we |
1357 | //! prefer false negatives to false positives (ideally there would be no false positives). This | 1500 | //! prefer false negatives to false positives (ideally there would be no false positives). This |
@@ -1393,46 +1536,5 @@ fn main() { | |||
1393 | "#, | 1536 | "#, |
1394 | ); | 1537 | ); |
1395 | } | 1538 | } |
1396 | |||
1397 | #[test] | ||
1398 | fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { | ||
1399 | // We don't currently handle tuple patterns with ellipsis. | ||
1400 | check_diagnostics( | ||
1401 | r#" | ||
1402 | fn main() { | ||
1403 | match (false, true, false) { | ||
1404 | (false, ..) => (), | ||
1405 | } | ||
1406 | } | ||
1407 | "#, | ||
1408 | ); | ||
1409 | } | ||
1410 | |||
1411 | #[test] | ||
1412 | fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { | ||
1413 | // We don't currently handle tuple patterns with ellipsis. | ||
1414 | check_diagnostics( | ||
1415 | r#" | ||
1416 | fn main() { | ||
1417 | match (false, true, false) { | ||
1418 | (.., false) => (), | ||
1419 | } | ||
1420 | } | ||
1421 | "#, | ||
1422 | ); | ||
1423 | } | ||
1424 | |||
1425 | #[test] | ||
1426 | fn struct_missing_arm() { | ||
1427 | // We don't currently handle structs. | ||
1428 | check_diagnostics( | ||
1429 | r#" | ||
1430 | struct Foo { a: bool } | ||
1431 | fn main(f: Foo) { | ||
1432 | match f { Foo { a: true } => () } | ||
1433 | } | ||
1434 | "#, | ||
1435 | ); | ||
1436 | } | ||
1437 | } | 1539 | } |
1438 | } | 1540 | } |
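The match-check changes above stop special-casing tuple patterns that contain `..`: the constructor arity now comes from the pattern's inferred tuple type rather than from the number of written subpatterns, and struct constructors are handled the same way. A minimal standalone sketch of the underlying idea, using a toy `Pat` type rather than rust-analyzer's internals (the elided middle behaves like `arity - written` wildcards):

#[derive(Debug, Clone, PartialEq)]
enum Pat {
    Wild,
    Bool(bool),
}

// Expand a tuple pattern written with `..` against a tuple of `arity`
// elements into a full-width row, filling the elided middle with wildcards.
fn expand_ellipsis(pre: &[Pat], post: &[Pat], arity: usize) -> Vec<Pat> {
    assert!(pre.len() + post.len() <= arity, "pattern has too many fields");
    let missing = arity - pre.len() - post.len();
    pre.iter()
        .cloned()
        .chain(std::iter::repeat(Pat::Wild).take(missing))
        .chain(post.iter().cloned())
        .collect()
}

fn main() {
    // `(false, ..)` against `(bool, bool, bool)`: one written field, arity 3.
    assert_eq!(
        expand_ellipsis(&[Pat::Bool(false)], &[], 3),
        vec![Pat::Bool(false), Pat::Wild, Pat::Wild]
    );
    // `(.., false)`: the ellipsis sits at the front instead.
    assert_eq!(
        expand_ellipsis(&[], &[Pat::Bool(false)], 3),
        vec![Pat::Wild, Pat::Wild, Pat::Bool(false)]
    );
}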
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs index cde2ab82b..b70ec55eb 100644 --- a/crates/hir_ty/src/infer/pat.rs +++ b/crates/hir_ty/src/infer/pat.rs | |||
@@ -23,6 +23,7 @@ impl<'a> InferenceContext<'a> { | |||
23 | expected: &Ty, | 23 | expected: &Ty, |
24 | default_bm: BindingMode, | 24 | default_bm: BindingMode, |
25 | id: PatId, | 25 | id: PatId, |
26 | ellipsis: Option<usize>, | ||
26 | ) -> Ty { | 27 | ) -> Ty { |
27 | let (ty, def) = self.resolve_variant(path); | 28 | let (ty, def) = self.resolve_variant(path); |
28 | let var_data = def.map(|it| variant_data(self.db.upcast(), it)); | 29 | let var_data = def.map(|it| variant_data(self.db.upcast(), it)); |
@@ -34,8 +35,15 @@ impl<'a> InferenceContext<'a> { | |||
34 | let substs = ty.substs().unwrap_or_else(Substs::empty); | 35 | let substs = ty.substs().unwrap_or_else(Substs::empty); |
35 | 36 | ||
36 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); | 37 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); |
38 | let (pre, post) = match ellipsis { | ||
39 | Some(idx) => subpats.split_at(idx), | ||
40 | None => (&subpats[..], &[][..]), | ||
41 | }; | ||
42 | let post_idx_offset = field_tys.iter().count() - post.len(); | ||
37 | 43 | ||
38 | for (i, &subpat) in subpats.iter().enumerate() { | 44 | let pre_iter = pre.iter().enumerate(); |
45 | let post_iter = (post_idx_offset..).zip(post.iter()); | ||
46 | for (i, &subpat) in pre_iter.chain(post_iter) { | ||
39 | let expected_ty = var_data | 47 | let expected_ty = var_data |
40 | .as_ref() | 48 | .as_ref() |
41 | .and_then(|d| d.field(&Name::new_tuple_field(i))) | 49 | .and_then(|d| d.field(&Name::new_tuple_field(i))) |
@@ -111,20 +119,29 @@ impl<'a> InferenceContext<'a> { | |||
111 | let expected = expected; | 119 | let expected = expected; |
112 | 120 | ||
113 | let ty = match &body[pat] { | 121 | let ty = match &body[pat] { |
114 | Pat::Tuple { ref args, .. } => { | 122 | &Pat::Tuple { ref args, ellipsis } => { |
115 | let expectations = match expected.as_tuple() { | 123 | let expectations = match expected.as_tuple() { |
116 | Some(parameters) => &*parameters.0, | 124 | Some(parameters) => &*parameters.0, |
117 | _ => &[], | 125 | _ => &[], |
118 | }; | 126 | }; |
119 | let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown)); | ||
120 | 127 | ||
121 | let inner_tys = args | 128 | let (pre, post) = match ellipsis { |
122 | .iter() | 129 | Some(idx) => args.split_at(idx), |
123 | .zip(expectations_iter) | 130 | None => (&args[..], &[][..]), |
124 | .map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm)) | 131 | }; |
125 | .collect(); | 132 | let n_uncovered_patterns = expectations.len().saturating_sub(args.len()); |
133 | let mut expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown)); | ||
134 | let mut infer_pat = |(&pat, ty)| self.infer_pat(pat, ty, default_bm); | ||
135 | |||
136 | let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + args.len()); | ||
137 | inner_tys.extend(pre.iter().zip(expectations_iter.by_ref()).map(&mut infer_pat)); | ||
138 | inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns).cloned()); | ||
139 | inner_tys.extend(post.iter().zip(expectations_iter).map(infer_pat)); | ||
126 | 140 | ||
127 | Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys)) | 141 | Ty::apply( |
142 | TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, | ||
143 | Substs(inner_tys.into()), | ||
144 | ) | ||
128 | } | 145 | } |
129 | Pat::Or(ref pats) => { | 146 | Pat::Or(ref pats) => { |
130 | if let Some((first_pat, rest)) = pats.split_first() { | 147 | if let Some((first_pat, rest)) = pats.split_first() { |
@@ -150,9 +167,14 @@ impl<'a> InferenceContext<'a> { | |||
150 | let subty = self.infer_pat(*pat, expectation, default_bm); | 167 | let subty = self.infer_pat(*pat, expectation, default_bm); |
151 | Ty::apply_one(TypeCtor::Ref(*mutability), subty) | 168 | Ty::apply_one(TypeCtor::Ref(*mutability), subty) |
152 | } | 169 | } |
153 | Pat::TupleStruct { path: p, args: subpats, .. } => { | 170 | Pat::TupleStruct { path: p, args: subpats, ellipsis } => self.infer_tuple_struct_pat( |
154 | self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat) | 171 | p.as_ref(), |
155 | } | 172 | subpats, |
173 | expected, | ||
174 | default_bm, | ||
175 | pat, | ||
176 | *ellipsis, | ||
177 | ), | ||
156 | Pat::Record { path: p, args: fields, ellipsis: _ } => { | 178 | Pat::Record { path: p, args: fields, ellipsis: _ } => { |
157 | self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat) | 179 | self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat) |
158 | } | 180 | } |
diff --git a/crates/hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs index 6a965ac4f..5a5f48fd0 100644 --- a/crates/hir_ty/src/tests/patterns.rs +++ b/crates/hir_ty/src/tests/patterns.rs | |||
@@ -679,3 +679,98 @@ fn box_pattern() { | |||
679 | "#]], | 679 | "#]], |
680 | ); | 680 | ); |
681 | } | 681 | } |
682 | |||
683 | #[test] | ||
684 | fn tuple_ellipsis_pattern() { | ||
685 | check_infer( | ||
686 | r#" | ||
687 | fn foo(tuple: (u8, i16, f32)) { | ||
688 | match tuple { | ||
689 | (.., b, c) => {}, | ||
690 | (a, .., c) => {}, | ||
691 | (a, b, ..) => {}, | ||
692 | (a, b) => {/*too short*/} | ||
693 | (a, b, c, d) => {/*too long*/} | ||
694 | _ => {} | ||
695 | } | ||
696 | }"#, | ||
697 | expect![[r#" | ||
698 | 7..12 'tuple': (u8, i16, f32) | ||
699 | 30..224 '{ ... } }': () | ||
700 | 36..222 'match ... }': () | ||
701 | 42..47 'tuple': (u8, i16, f32) | ||
702 | 58..68 '(.., b, c)': (u8, i16, f32) | ||
703 | 63..64 'b': i16 | ||
704 | 66..67 'c': f32 | ||
705 | 72..74 '{}': () | ||
706 | 84..94 '(a, .., c)': (u8, i16, f32) | ||
707 | 85..86 'a': u8 | ||
708 | 92..93 'c': f32 | ||
709 | 98..100 '{}': () | ||
710 | 110..120 '(a, b, ..)': (u8, i16, f32) | ||
711 | 111..112 'a': u8 | ||
712 | 114..115 'b': i16 | ||
713 | 124..126 '{}': () | ||
714 | 136..142 '(a, b)': (u8, i16, f32) | ||
715 | 137..138 'a': u8 | ||
716 | 140..141 'b': i16 | ||
717 | 146..161 '{/*too short*/}': () | ||
718 | 170..182 '(a, b, c, d)': (u8, i16, f32, {unknown}) | ||
719 | 171..172 'a': u8 | ||
720 | 174..175 'b': i16 | ||
721 | 177..178 'c': f32 | ||
722 | 180..181 'd': {unknown} | ||
723 | 186..200 '{/*too long*/}': () | ||
724 | 209..210 '_': (u8, i16, f32) | ||
725 | 214..216 '{}': () | ||
726 | "#]], | ||
727 | ); | ||
728 | } | ||
729 | |||
730 | #[test] | ||
731 | fn tuple_struct_ellipsis_pattern() { | ||
732 | check_infer( | ||
733 | r#" | ||
734 | struct Tuple(u8, i16, f32); | ||
735 | fn foo(tuple: Tuple) { | ||
736 | match tuple { | ||
737 | Tuple(.., b, c) => {}, | ||
738 | Tuple(a, .., c) => {}, | ||
739 | Tuple(a, b, ..) => {}, | ||
740 | Tuple(a, b) => {/*too short*/} | ||
741 | Tuple(a, b, c, d) => {/*too long*/} | ||
742 | _ => {} | ||
743 | } | ||
744 | }"#, | ||
745 | expect![[r#" | ||
746 | 35..40 'tuple': Tuple | ||
747 | 49..268 '{ ... } }': () | ||
748 | 55..266 'match ... }': () | ||
749 | 61..66 'tuple': Tuple | ||
750 | 77..92 'Tuple(.., b, c)': Tuple | ||
751 | 87..88 'b': i16 | ||
752 | 90..91 'c': f32 | ||
753 | 96..98 '{}': () | ||
754 | 108..123 'Tuple(a, .., c)': Tuple | ||
755 | 114..115 'a': u8 | ||
756 | 121..122 'c': f32 | ||
757 | 127..129 '{}': () | ||
758 | 139..154 'Tuple(a, b, ..)': Tuple | ||
759 | 145..146 'a': u8 | ||
760 | 148..149 'b': i16 | ||
761 | 158..160 '{}': () | ||
762 | 170..181 'Tuple(a, b)': Tuple | ||
763 | 176..177 'a': u8 | ||
764 | 179..180 'b': i16 | ||
765 | 185..200 '{/*too short*/}': () | ||
766 | 209..226 'Tuple(... c, d)': Tuple | ||
767 | 215..216 'a': u8 | ||
768 | 218..219 'b': i16 | ||
769 | 221..222 'c': f32 | ||
770 | 224..225 'd': {unknown} | ||
771 | 230..244 '{/*too long*/}': () | ||
772 | 253..254 '_': Tuple | ||
773 | 258..260 '{}': () | ||
774 | "#]], | ||
775 | ); | ||
776 | } | ||
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs index 3df73ed4f..9d3d88289 100644 --- a/crates/ide/src/diagnostics.rs +++ b/crates/ide/src/diagnostics.rs | |||
@@ -142,6 +142,15 @@ pub(crate) fn diagnostics( | |||
142 | .with_code(Some(d.code())), | 142 | .with_code(Some(d.code())), |
143 | ); | 143 | ); |
144 | }) | 144 | }) |
145 | .on::<hir::diagnostics::UnresolvedProcMacro, _>(|d| { | ||
146 | // Use a more accurate position if available. | ||
147 | let display_range = | ||
148 | d.precise_location.unwrap_or_else(|| sema.diagnostics_display_range(d).range); | ||
149 | |||
150 | // FIXME: it would be nice to tell the user whether proc macros are currently disabled | ||
151 | res.borrow_mut() | ||
152 | .push(Diagnostic::hint(display_range, d.message()).with_code(Some(d.code()))); | ||
153 | }) | ||
145 | // Only collect experimental diagnostics when they're enabled. | 154 | // Only collect experimental diagnostics when they're enabled. |
146 | .filter(|diag| !(diag.is_experimental() && config.disable_experimental)) | 155 | .filter(|diag| !(diag.is_experimental() && config.disable_experimental)) |
147 | .filter(|diag| !config.disabled.contains(diag.code().as_str())); | 156 | .filter(|diag| !config.disabled.contains(diag.code().as_str())); |
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 15792f947..b9810457f 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs | |||
@@ -1,14 +1,10 @@ | |||
1 | use hir::Semantics; | 1 | use hir::Semantics; |
2 | use ide_db::{ | 2 | use ide_db::{ |
3 | base_db::FileId, | ||
3 | defs::{NameClass, NameRefClass}, | 4 | defs::{NameClass, NameRefClass}, |
4 | symbol_index, RootDatabase, | 5 | symbol_index, RootDatabase, |
5 | }; | 6 | }; |
6 | use syntax::{ | 7 | use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; |
7 | ast::{self}, | ||
8 | match_ast, AstNode, | ||
9 | SyntaxKind::*, | ||
10 | SyntaxToken, TokenAtOffset, T, | ||
11 | }; | ||
12 | 8 | ||
13 | use crate::{ | 9 | use crate::{ |
14 | display::{ToNav, TryToNav}, | 10 | display::{ToNav, TryToNav}, |
@@ -44,6 +40,19 @@ pub(crate) fn goto_definition( | |||
44 | let nav = def.try_to_nav(sema.db)?; | 40 | let nav = def.try_to_nav(sema.db)?; |
45 | vec![nav] | 41 | vec![nav] |
46 | }, | 42 | }, |
43 | ast::SelfParam(self_param) => { | ||
44 | vec![self_to_nav_target(self_param, position.file_id)?] | ||
45 | }, | ||
46 | ast::PathSegment(segment) => { | ||
47 | segment.self_token()?; | ||
48 | let path = segment.parent_path(); | ||
49 | if path.qualifier().is_some() && !ast::PathExpr::can_cast(path.syntax().parent()?.kind()) { | ||
50 | return None; | ||
51 | } | ||
52 | let func = segment.syntax().ancestors().find_map(ast::Fn::cast)?; | ||
53 | let self_param = func.param_list()?.self_param()?; | ||
54 | vec![self_to_nav_target(self_param, position.file_id)?] | ||
55 | }, | ||
47 | _ => return None, | 56 | _ => return None, |
48 | } | 57 | } |
49 | }; | 58 | }; |
@@ -62,6 +71,20 @@ fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> { | |||
62 | } | 71 | } |
63 | } | 72 | } |
64 | 73 | ||
74 | fn self_to_nav_target(self_param: ast::SelfParam, file_id: FileId) -> Option<NavigationTarget> { | ||
75 | let self_token = self_param.self_token()?; | ||
76 | Some(NavigationTarget { | ||
77 | file_id, | ||
78 | full_range: self_param.syntax().text_range(), | ||
79 | focus_range: Some(self_token.text_range()), | ||
80 | name: self_token.text().clone(), | ||
81 | kind: self_token.kind(), | ||
82 | container_name: None, | ||
83 | description: None, | ||
84 | docs: None, | ||
85 | }) | ||
86 | } | ||
87 | |||
65 | #[derive(Debug)] | 88 | #[derive(Debug)] |
66 | pub(crate) enum ReferenceResult { | 89 | pub(crate) enum ReferenceResult { |
67 | Exact(NavigationTarget), | 90 | Exact(NavigationTarget), |
@@ -984,4 +1007,33 @@ fn g() -> <() as Iterator<A = (), B<|> = u8>>::A {} | |||
984 | "#, | 1007 | "#, |
985 | ); | 1008 | ); |
986 | } | 1009 | } |
1010 | |||
1011 | #[test] | ||
1012 | fn goto_self_param_ty_specified() { | ||
1013 | check( | ||
1014 | r#" | ||
1015 | struct Foo {} | ||
1016 | |||
1017 | impl Foo { | ||
1018 | fn bar(self: &Foo) { | ||
1019 | //^^^^ | ||
1020 | let foo = sel<|>f; | ||
1021 | } | ||
1022 | }"#, | ||
1023 | ) | ||
1024 | } | ||
1025 | |||
1026 | #[test] | ||
1027 | fn goto_self_param_on_decl() { | ||
1028 | check( | ||
1029 | r#" | ||
1030 | struct Foo {} | ||
1031 | |||
1032 | impl Foo { | ||
1033 | fn bar(&self<|>) { | ||
1034 | //^^^^ | ||
1035 | } | ||
1036 | }"#, | ||
1037 | ) | ||
1038 | } | ||
987 | } | 1039 | } |
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 832192881..94ad800a0 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs | |||
@@ -139,14 +139,17 @@ pub(crate) fn hover( | |||
139 | } | 139 | } |
140 | } | 140 | } |
141 | 141 | ||
142 | let node = token | 142 | let node = token.ancestors().find(|n| { |
143 | .ancestors() | 143 | ast::Expr::can_cast(n.kind()) |
144 | .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; | 144 | || ast::Pat::can_cast(n.kind()) |
145 | || ast::SelfParam::can_cast(n.kind()) | ||
146 | })?; | ||
145 | 147 | ||
146 | let ty = match_ast! { | 148 | let ty = match_ast! { |
147 | match node { | 149 | match node { |
148 | ast::Expr(it) => sema.type_of_expr(&it)?, | 150 | ast::Expr(it) => sema.type_of_expr(&it)?, |
149 | ast::Pat(it) => sema.type_of_pat(&it)?, | 151 | ast::Pat(it) => sema.type_of_pat(&it)?, |
152 | ast::SelfParam(self_param) => sema.type_of_self(&self_param)?, | ||
150 | // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve. | 153 | // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve. |
151 | // (e.g expanding a builtin macro). So we give up here. | 154 | // (e.g expanding a builtin macro). So we give up here. |
152 | ast::MacroCall(_it) => return None, | 155 | ast::MacroCall(_it) => return None, |
@@ -3282,4 +3285,41 @@ fn main() { | |||
3282 | "#]], | 3285 | "#]], |
3283 | ); | 3286 | ); |
3284 | } | 3287 | } |
3288 | |||
3289 | #[test] | ||
3290 | fn hover_self_param_shows_type() { | ||
3291 | check( | ||
3292 | r#" | ||
3293 | struct Foo {} | ||
3294 | impl Foo { | ||
3295 | fn bar(&sel<|>f) {} | ||
3296 | } | ||
3297 | "#, | ||
3298 | expect![[r#" | ||
3299 | *&self* | ||
3300 | ```rust | ||
3301 | &Foo | ||
3302 | ``` | ||
3303 | "#]], | ||
3304 | ); | ||
3305 | } | ||
3306 | |||
3307 | #[test] | ||
3308 | fn hover_self_param_shows_type_for_arbitrary_self_type() { | ||
3309 | check( | ||
3310 | r#" | ||
3311 | struct Arc<T>(T); | ||
3312 | struct Foo {} | ||
3313 | impl Foo { | ||
3314 | fn bar(sel<|>f: Arc<Foo>) {} | ||
3315 | } | ||
3316 | "#, | ||
3317 | expect![[r#" | ||
3318 | *self: Arc<Foo>* | ||
3319 | ```rust | ||
3320 | Arc<Foo> | ||
3321 | ``` | ||
3322 | "#]], | ||
3323 | ); | ||
3324 | } | ||
3285 | } | 3325 | } |
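A toy sketch of the `can_cast` idiom the hover change switches to, with hand-rolled stand-ins for the `syntax` crate types: asking the syntax kind whether a cast would succeed avoids cloning the node just to throw the cast away.

#[derive(Clone, Copy, PartialEq, Debug)]
enum SyntaxKind {
    ExprNode,
    PatNode,
}

#[derive(Clone, Debug)]
struct SyntaxNode {
    kind: SyntaxKind,
}

impl SyntaxNode {
    fn kind(&self) -> SyntaxKind {
        self.kind
    }
}

#[allow(dead_code)]
struct Expr(SyntaxNode);

impl Expr {
    // `can_cast` only inspects the kind tag; `cast` takes ownership of a node.
    fn can_cast(kind: SyntaxKind) -> bool {
        kind == SyntaxKind::ExprNode
    }
    fn cast(node: SyntaxNode) -> Option<Expr> {
        if Self::can_cast(node.kind()) { Some(Expr(node)) } else { None }
    }
}

fn main() {
    let expr = SyntaxNode { kind: SyntaxKind::ExprNode };
    let pat = SyntaxNode { kind: SyntaxKind::PatNode };
    // Both checks agree; the second one skips the clone.
    assert_eq!(Expr::cast(expr.clone()).is_some(), Expr::can_cast(expr.kind()));
    assert!(!Expr::can_cast(pat.kind()));
}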
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 6cfb22e13..65df7979c 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use assists::utils::FamousDefs; | ||
2 | use either::Either; | 1 | use either::Either; |
3 | use hir::{known, Callable, HirDisplay, Semantics}; | 2 | use hir::{known, Callable, HirDisplay, Semantics}; |
3 | use ide_db::helpers::FamousDefs; | ||
4 | use ide_db::RootDatabase; | 4 | use ide_db::RootDatabase; |
5 | use stdx::to_lower_snake_case; | 5 | use stdx::to_lower_snake_case; |
6 | use syntax::{ | 6 | use syntax::{ |
@@ -427,8 +427,8 @@ fn get_callable(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<hir: | |||
427 | 427 | ||
428 | #[cfg(test)] | 428 | #[cfg(test)] |
429 | mod tests { | 429 | mod tests { |
430 | use assists::utils::FamousDefs; | ||
431 | use expect_test::{expect, Expect}; | 430 | use expect_test::{expect, Expect}; |
431 | use ide_db::helpers::FamousDefs; | ||
432 | use test_utils::extract_annotations; | 432 | use test_utils::extract_annotations; |
433 | 433 | ||
434 | use crate::{fixture, inlay_hints::InlayHintsConfig}; | 434 | use crate::{fixture, inlay_hints::InlayHintsConfig}; |
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 6288f7ea7..5244bdd61 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs | |||
@@ -87,9 +87,7 @@ pub use ide_db::{ | |||
87 | search::{Reference, ReferenceAccess, ReferenceKind}, | 87 | search::{Reference, ReferenceAccess, ReferenceKind}, |
88 | }; | 88 | }; |
89 | 89 | ||
90 | pub use assists::{ | 90 | pub use assists::{Assist, AssistConfig, AssistId, AssistKind, ResolvedAssist}; |
91 | utils::MergeBehaviour, Assist, AssistConfig, AssistId, AssistKind, ResolvedAssist, | ||
92 | }; | ||
93 | pub use hir::{Documentation, Semantics}; | 91 | pub use hir::{Documentation, Semantics}; |
94 | pub use ide_db::base_db::{ | 92 | pub use ide_db::base_db::{ |
95 | Canceled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, | 93 | Canceled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot, |
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs index b8725693a..731457696 100644 --- a/crates/ide/src/references/rename.rs +++ b/crates/ide/src/references/rename.rs | |||
@@ -1,4 +1,9 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | use std::{ | ||
3 | convert::TryInto, | ||
4 | error::Error, | ||
5 | fmt::{self, Display}, | ||
6 | }; | ||
2 | 7 | ||
3 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; | 8 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; |
4 | use ide_db::base_db::{FileRange, SourceDatabaseExt}; | 9 | use ide_db::base_db::{FileRange, SourceDatabaseExt}; |
@@ -6,12 +11,6 @@ use ide_db::{ | |||
6 | defs::{Definition, NameClass, NameRefClass}, | 11 | defs::{Definition, NameClass, NameRefClass}, |
7 | RootDatabase, | 12 | RootDatabase, |
8 | }; | 13 | }; |
9 | |||
10 | use std::{ | ||
11 | convert::TryInto, | ||
12 | error::Error, | ||
13 | fmt::{self, Display}, | ||
14 | }; | ||
15 | use syntax::{ | 14 | use syntax::{ |
16 | algo::find_node_at_offset, | 15 | algo::find_node_at_offset, |
17 | ast::{self, NameOwner}, | 16 | ast::{self, NameOwner}, |
@@ -222,24 +221,47 @@ fn rename_to_self( | |||
222 | let source_file = sema.parse(position.file_id); | 221 | let source_file = sema.parse(position.file_id); |
223 | let syn = source_file.syntax(); | 222 | let syn = source_file.syntax(); |
224 | 223 | ||
225 | let fn_def = find_node_at_offset::<ast::Fn>(syn, position.offset) | 224 | let (fn_def, fn_ast) = find_node_at_offset::<ast::Fn>(syn, position.offset) |
225 | .and_then(|fn_ast| sema.to_def(&fn_ast).zip(Some(fn_ast))) | ||
226 | .ok_or_else(|| RenameError("No surrounding method declaration found".to_string()))?; | 226 | .ok_or_else(|| RenameError("No surrounding method declaration found".to_string()))?; |
227 | let params = | 227 | let param_range = fn_ast |
228 | fn_def.param_list().ok_or_else(|| RenameError("Method has no parameters".to_string()))?; | 228 | .param_list() |
229 | if params.self_param().is_some() { | 229 | .and_then(|p| p.params().next()) |
230 | .ok_or_else(|| RenameError("Method has no parameters".to_string()))? | ||
231 | .syntax() | ||
232 | .text_range(); | ||
233 | if !param_range.contains(position.offset) { | ||
234 | return Err(RenameError("Only the first parameter can be self".to_string())); | ||
235 | } | ||
236 | |||
237 | let impl_block = find_node_at_offset::<ast::Impl>(syn, position.offset) | ||
238 | .and_then(|def| sema.to_def(&def)) | ||
239 | .ok_or_else(|| RenameError("No impl block found for function".to_string()))?; | ||
240 | if fn_def.self_param(sema.db).is_some() { | ||
230 | return Err(RenameError("Method already has a self parameter".to_string())); | 241 | return Err(RenameError("Method already has a self parameter".to_string())); |
231 | } | 242 | } |
243 | |||
244 | let params = fn_def.params(sema.db); | ||
232 | let first_param = | 245 | let first_param = |
233 | params.params().next().ok_or_else(|| RenameError("Method has no parameters".into()))?; | 246 | params.first().ok_or_else(|| RenameError("Method has no parameters".into()))?; |
234 | let mutable = match first_param.ty() { | 247 | let first_param_ty = first_param.ty(); |
235 | Some(ast::Type::RefType(rt)) => rt.mut_token().is_some(), | 248 | let impl_ty = impl_block.target_ty(sema.db); |
236 | _ => return Err(RenameError("Not renaming other types".to_string())), | 249 | let (ty, self_param) = if impl_ty.remove_ref().is_some() { |
250 | // if the impl is a ref to the type, we can just match the `&T` with self directly | ||
251 | (first_param_ty.clone(), "self") | ||
252 | } else { | ||
253 | first_param_ty.remove_ref().map_or((first_param_ty.clone(), "self"), |ty| { | ||
254 | (ty, if first_param_ty.is_mutable_reference() { "&mut self" } else { "&self" }) | ||
255 | }) | ||
237 | }; | 256 | }; |
238 | 257 | ||
258 | if ty != impl_ty { | ||
259 | return Err(RenameError("Parameter type differs from impl block type".to_string())); | ||
260 | } | ||
261 | |||
239 | let RangeInfo { range, info: refs } = find_all_refs(sema, position, None) | 262 | let RangeInfo { range, info: refs } = find_all_refs(sema, position, None) |
240 | .ok_or_else(|| RenameError("No reference found at position".to_string()))?; | 263 | .ok_or_else(|| RenameError("No reference found at position".to_string()))?; |
241 | 264 | ||
242 | let param_range = first_param.syntax().text_range(); | ||
243 | let (param_ref, usages): (Vec<Reference>, Vec<Reference>) = refs | 265 | let (param_ref, usages): (Vec<Reference>, Vec<Reference>) = refs |
244 | .into_iter() | 266 | .into_iter() |
245 | .partition(|reference| param_range.intersect(reference.file_range.range).is_some()); | 267 | .partition(|reference| param_range.intersect(reference.file_range.range).is_some()); |
@@ -255,10 +277,7 @@ fn rename_to_self( | |||
255 | 277 | ||
256 | edits.push(SourceFileEdit { | 278 | edits.push(SourceFileEdit { |
257 | file_id: position.file_id, | 279 | file_id: position.file_id, |
258 | edit: TextEdit::replace( | 280 | edit: TextEdit::replace(param_range, String::from(self_param)), |
259 | param_range, | ||
260 | String::from(if mutable { "&mut self" } else { "&self" }), | ||
261 | ), | ||
262 | }); | 281 | }); |
263 | 282 | ||
264 | Ok(RangeInfo::new(range, SourceChange::from(edits))) | 283 | Ok(RangeInfo::new(range, SourceChange::from(edits))) |
@@ -281,7 +300,11 @@ fn text_edit_from_self_param( | |||
281 | 300 | ||
282 | let mut replacement_text = String::from(new_name); | 301 | let mut replacement_text = String::from(new_name); |
283 | replacement_text.push_str(": "); | 302 | replacement_text.push_str(": "); |
284 | replacement_text.push_str(self_param.mut_token().map_or("&", |_| "&mut ")); | 303 | match (self_param.amp_token(), self_param.mut_token()) { |
304 | (None, None) => (), | ||
305 | (Some(_), None) => replacement_text.push('&'), | ||
306 | (_, Some(_)) => replacement_text.push_str("&mut "), | ||
307 | }; | ||
285 | replacement_text.push_str(type_name.as_str()); | 308 | replacement_text.push_str(type_name.as_str()); |
286 | 309 | ||
287 | Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) | 310 | Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) |
@@ -1083,6 +1106,95 @@ impl Foo { | |||
1083 | } | 1106 | } |
1084 | "#, | 1107 | "#, |
1085 | ); | 1108 | ); |
1109 | check( | ||
1110 | "self", | ||
1111 | r#" | ||
1112 | struct Foo { i: i32 } | ||
1113 | |||
1114 | impl Foo { | ||
1115 | fn f(foo<|>: Foo) -> i32 { | ||
1116 | foo.i | ||
1117 | } | ||
1118 | } | ||
1119 | "#, | ||
1120 | r#" | ||
1121 | struct Foo { i: i32 } | ||
1122 | |||
1123 | impl Foo { | ||
1124 | fn f(self) -> i32 { | ||
1125 | self.i | ||
1126 | } | ||
1127 | } | ||
1128 | "#, | ||
1129 | ); | ||
1130 | } | ||
1131 | |||
1132 | #[test] | ||
1133 | fn test_parameter_to_self_error_no_impl() { | ||
1134 | check( | ||
1135 | "self", | ||
1136 | r#" | ||
1137 | struct Foo { i: i32 } | ||
1138 | |||
1139 | fn f(foo<|>: &mut Foo) -> i32 { | ||
1140 | foo.i | ||
1141 | } | ||
1142 | "#, | ||
1143 | "error: No impl block found for function", | ||
1144 | ); | ||
1145 | check( | ||
1146 | "self", | ||
1147 | r#" | ||
1148 | struct Foo { i: i32 } | ||
1149 | struct Bar; | ||
1150 | |||
1151 | impl Bar { | ||
1152 | fn f(foo<|>: &mut Foo) -> i32 { | ||
1153 | foo.i | ||
1154 | } | ||
1155 | } | ||
1156 | "#, | ||
1157 | "error: Parameter type differs from impl block type", | ||
1158 | ); | ||
1159 | } | ||
1160 | |||
1161 | #[test] | ||
1162 | fn test_parameter_to_self_error_not_first() { | ||
1163 | check( | ||
1164 | "self", | ||
1165 | r#" | ||
1166 | struct Foo { i: i32 } | ||
1167 | impl Foo { | ||
1168 | fn f(x: (), foo<|>: &mut Foo) -> i32 { | ||
1169 | foo.i | ||
1170 | } | ||
1171 | } | ||
1172 | "#, | ||
1173 | "error: Only the first parameter can be self", | ||
1174 | ); | ||
1175 | } | ||
1176 | |||
1177 | #[test] | ||
1178 | fn test_parameter_to_self_impl_ref() { | ||
1179 | check( | ||
1180 | "self", | ||
1181 | r#" | ||
1182 | struct Foo { i: i32 } | ||
1183 | impl &Foo { | ||
1184 | fn f(foo<|>: &Foo) -> i32 { | ||
1185 | foo.i | ||
1186 | } | ||
1187 | } | ||
1188 | "#, | ||
1189 | r#" | ||
1190 | struct Foo { i: i32 } | ||
1191 | impl &Foo { | ||
1192 | fn f(self) -> i32 { | ||
1193 | self.i | ||
1194 | } | ||
1195 | } | ||
1196 | "#, | ||
1197 | ); | ||
1086 | } | 1198 | } |
1087 | 1199 | ||
1088 | #[test] | 1200 | #[test] |
@@ -1111,6 +1223,31 @@ impl Foo { | |||
1111 | } | 1223 | } |
1112 | 1224 | ||
1113 | #[test] | 1225 | #[test] |
1226 | fn test_owned_self_to_parameter() { | ||
1227 | check( | ||
1228 | "foo", | ||
1229 | r#" | ||
1230 | struct Foo { i: i32 } | ||
1231 | |||
1232 | impl Foo { | ||
1233 | fn f(<|>self) -> i32 { | ||
1234 | self.i | ||
1235 | } | ||
1236 | } | ||
1237 | "#, | ||
1238 | r#" | ||
1239 | struct Foo { i: i32 } | ||
1240 | |||
1241 | impl Foo { | ||
1242 | fn f(foo: Foo) -> i32 { | ||
1243 | foo.i | ||
1244 | } | ||
1245 | } | ||
1246 | "#, | ||
1247 | ); | ||
1248 | } | ||
1249 | |||
1250 | #[test] | ||
1114 | fn test_self_in_path_to_parameter() { | 1251 | fn test_self_in_path_to_parameter() { |
1115 | check( | 1252 | check( |
1116 | "foo", | 1253 | "foo", |
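The reworked `rename_to_self` decides the replacement text from two inputs: the first parameter's type and the impl block's target type. A hedged standalone sketch of that decision with a toy `Ty` enum (the real code goes through `hir::Type`, `remove_ref`, and `is_mutable_reference`):

#[derive(Clone, PartialEq, Debug)]
enum Ty {
    Named(&'static str),
    Ref { inner: Box<Ty>, mutable: bool },
}

impl Ty {
    // Stand-ins for the hir::Type helpers used in the diff.
    fn remove_ref(&self) -> Option<Ty> {
        match self {
            Ty::Ref { inner, .. } => Some((**inner).clone()),
            _ => None,
        }
    }
    fn is_mutable_reference(&self) -> bool {
        matches!(self, Ty::Ref { mutable: true, .. })
    }
}

fn self_replacement(first_param_ty: &Ty, impl_ty: &Ty) -> Result<&'static str, String> {
    let (ty, self_param) = if impl_ty.remove_ref().is_some() {
        // The impl target is itself a reference (`impl &Foo`), so a `&Foo`
        // parameter maps to plain `self`.
        (first_param_ty.clone(), "self")
    } else {
        first_param_ty.remove_ref().map_or((first_param_ty.clone(), "self"), |ty| {
            (ty, if first_param_ty.is_mutable_reference() { "&mut self" } else { "&self" })
        })
    };
    if &ty != impl_ty {
        return Err("Parameter type differs from impl block type".to_string());
    }
    Ok(self_param)
}

fn main() {
    let foo = Ty::Named("Foo");
    let ref_foo = Ty::Ref { inner: Box::new(foo.clone()), mutable: false };
    let mut_foo = Ty::Ref { inner: Box::new(foo.clone()), mutable: true };

    assert_eq!(self_replacement(&ref_foo, &foo), Ok("&self"));
    assert_eq!(self_replacement(&mut_foo, &foo), Ok("&mut self"));
    assert_eq!(self_replacement(&foo, &foo), Ok("self"));
    assert_eq!(self_replacement(&ref_foo, &ref_foo), Ok("self"));
    assert!(self_replacement(&foo, &ref_foo).is_err());
}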
diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 8e91c99d7..e10d7c3a4 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use std::{fmt, iter::FromIterator, sync::Arc}; | 1 | use std::{fmt, iter::FromIterator, sync::Arc}; |
2 | 2 | ||
3 | use hir::MacroFile; | 3 | use hir::{ExpandResult, MacroFile}; |
4 | use ide_db::base_db::{ | 4 | use ide_db::base_db::{ |
5 | salsa::debug::{DebugQueryTable, TableEntry}, | 5 | salsa::debug::{DebugQueryTable, TableEntry}, |
6 | CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId, | 6 | CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId, |
@@ -19,7 +19,7 @@ fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { | |||
19 | ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>() | 19 | ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>() |
20 | } | 20 | } |
21 | fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { | 21 | fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { |
22 | hir::db::ParseMacroQuery.in_db(db).entries::<SyntaxTreeStats>() | 22 | hir::db::ParseMacroExpansionQuery.in_db(db).entries::<SyntaxTreeStats>() |
23 | } | 23 | } |
24 | 24 | ||
25 | // Feature: Status | 25 | // Feature: Status |
@@ -115,10 +115,12 @@ impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStat | |||
115 | } | 115 | } |
116 | } | 116 | } |
117 | 117 | ||
118 | impl<M> FromIterator<TableEntry<MacroFile, Option<(Parse<SyntaxNode>, M)>>> for SyntaxTreeStats { | 118 | impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>> |
119 | for SyntaxTreeStats | ||
120 | { | ||
119 | fn from_iter<T>(iter: T) -> SyntaxTreeStats | 121 | fn from_iter<T>(iter: T) -> SyntaxTreeStats |
120 | where | 122 | where |
121 | T: IntoIterator<Item = TableEntry<MacroFile, Option<(Parse<SyntaxNode>, M)>>>, | 123 | T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>, |
122 | { | 124 | { |
123 | let mut res = SyntaxTreeStats::default(); | 125 | let mut res = SyntaxTreeStats::default(); |
124 | for entry in iter { | 126 | for entry in iter { |
diff --git a/crates/ide_db/Cargo.toml b/crates/ide_db/Cargo.toml index 72a9212f1..0ad6e1000 100644 --- a/crates/ide_db/Cargo.toml +++ b/crates/ide_db/Cargo.toml | |||
@@ -18,7 +18,8 @@ rayon = "1.5.0" | |||
18 | fst = { version = "0.4", default-features = false } | 18 | fst = { version = "0.4", default-features = false } |
19 | rustc-hash = "1.1.0" | 19 | rustc-hash = "1.1.0" |
20 | once_cell = "1.3.1" | 20 | once_cell = "1.3.1" |
21 | either = "1.5.3" | 21 | either = "1.6.1" |
22 | itertools = "0.9.0" | ||
22 | 23 | ||
23 | stdx = { path = "../stdx", version = "0.0.0" } | 24 | stdx = { path = "../stdx", version = "0.0.0" } |
24 | syntax = { path = "../syntax", version = "0.0.0" } | 25 | syntax = { path = "../syntax", version = "0.0.0" } |
diff --git a/crates/ide_db/src/apply_change.rs b/crates/ide_db/src/apply_change.rs index da16fa21d..987191fe3 100644 --- a/crates/ide_db/src/apply_change.rs +++ b/crates/ide_db/src/apply_change.rs | |||
@@ -76,7 +76,7 @@ impl RootDatabase { | |||
76 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | 76 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); |
77 | 77 | ||
78 | base_db::ParseQuery.in_db(self).sweep(sweep); | 78 | base_db::ParseQuery.in_db(self).sweep(sweep); |
79 | hir::db::ParseMacroQuery.in_db(self).sweep(sweep); | 79 | hir::db::ParseMacroExpansionQuery.in_db(self).sweep(sweep); |
80 | 80 | ||
81 | // Macros do take significant space, but less than the syntax trees | 81 | // Macros do take significant space, but less than the syntax trees |
82 | // self.query(hir::db::MacroDefQuery).sweep(sweep); | 82 | // self.query(hir::db::MacroDefQuery).sweep(sweep); |
@@ -143,7 +143,7 @@ impl RootDatabase { | |||
143 | hir::db::AstIdMapQuery | 143 | hir::db::AstIdMapQuery |
144 | hir::db::MacroArgTextQuery | 144 | hir::db::MacroArgTextQuery |
145 | hir::db::MacroDefQuery | 145 | hir::db::MacroDefQuery |
146 | hir::db::ParseMacroQuery | 146 | hir::db::ParseMacroExpansionQuery |
147 | hir::db::MacroExpandQuery | 147 | hir::db::MacroExpandQuery |
148 | 148 | ||
149 | // DefDatabase | 149 | // DefDatabase |
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs new file mode 100644 index 000000000..d988588ff --- /dev/null +++ b/crates/ide_db/src/helpers.rs | |||
@@ -0,0 +1,203 @@ | |||
1 | //! A module with ide helpers for high-level ide features. | ||
2 | use crate::RootDatabase; | ||
3 | use hir::{Crate, Enum, Module, ScopeDef, Semantics, Trait}; | ||
4 | use syntax::ast::{self, make}; | ||
5 | |||
6 | pub mod insert_use; | ||
7 | |||
8 | /// Converts the mod path struct into its ast representation. | ||
9 | pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { | ||
10 | let _p = profile::span("mod_path_to_ast"); | ||
11 | |||
12 | let mut segments = Vec::new(); | ||
13 | let mut is_abs = false; | ||
14 | match path.kind { | ||
15 | hir::PathKind::Plain => {} | ||
16 | hir::PathKind::Super(0) => segments.push(make::path_segment_self()), | ||
17 | hir::PathKind::Super(n) => segments.extend((0..n).map(|_| make::path_segment_super())), | ||
18 | hir::PathKind::DollarCrate(_) | hir::PathKind::Crate => { | ||
19 | segments.push(make::path_segment_crate()) | ||
20 | } | ||
21 | hir::PathKind::Abs => is_abs = true, | ||
22 | } | ||
23 | |||
24 | segments.extend( | ||
25 | path.segments | ||
26 | .iter() | ||
27 | .map(|segment| make::path_segment(make::name_ref(&segment.to_string()))), | ||
28 | ); | ||
29 | make::path_from_segments(segments, is_abs) | ||
30 | } | ||
31 | |||
32 | /// Helps with finding well-known things inside the standard library. This is | ||
33 | /// somewhat similar to the known paths infra inside hir, but it is different; we | ||
34 | /// want to make sure that IDE specific paths don't become interesting inside | ||
35 | /// the compiler itself as well. | ||
36 | pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Option<Crate>); | ||
37 | |||
38 | #[allow(non_snake_case)] | ||
39 | impl FamousDefs<'_, '_> { | ||
40 | pub const FIXTURE: &'static str = r#"//- /libcore.rs crate:core | ||
41 | pub mod convert { | ||
42 | pub trait From<T> { | ||
43 | fn from(t: T) -> Self; | ||
44 | } | ||
45 | } | ||
46 | |||
47 | pub mod default { | ||
48 | pub trait Default { | ||
49 | fn default() -> Self; | ||
50 | } | ||
51 | } | ||
52 | |||
53 | pub mod iter { | ||
54 | pub use self::traits::{collect::IntoIterator, iterator::Iterator}; | ||
55 | mod traits { | ||
56 | pub(crate) mod iterator { | ||
57 | use crate::option::Option; | ||
58 | pub trait Iterator { | ||
59 | type Item; | ||
60 | fn next(&mut self) -> Option<Self::Item>; | ||
61 | fn by_ref(&mut self) -> &mut Self { | ||
62 | self | ||
63 | } | ||
64 | fn take(self, n: usize) -> crate::iter::Take<Self> { | ||
65 | crate::iter::Take { inner: self } | ||
66 | } | ||
67 | } | ||
68 | |||
69 | impl<I: Iterator> Iterator for &mut I { | ||
70 | type Item = I::Item; | ||
71 | fn next(&mut self) -> Option<I::Item> { | ||
72 | (**self).next() | ||
73 | } | ||
74 | } | ||
75 | } | ||
76 | pub(crate) mod collect { | ||
77 | pub trait IntoIterator { | ||
78 | type Item; | ||
79 | } | ||
80 | } | ||
81 | } | ||
82 | |||
83 | pub use self::sources::*; | ||
84 | pub(crate) mod sources { | ||
85 | use super::Iterator; | ||
86 | use crate::option::Option::{self, *}; | ||
87 | pub struct Repeat<A> { | ||
88 | element: A, | ||
89 | } | ||
90 | |||
91 | pub fn repeat<T>(elt: T) -> Repeat<T> { | ||
92 | Repeat { element: elt } | ||
93 | } | ||
94 | |||
95 | impl<A> Iterator for Repeat<A> { | ||
96 | type Item = A; | ||
97 | |||
98 | fn next(&mut self) -> Option<A> { | ||
99 | None | ||
100 | } | ||
101 | } | ||
102 | } | ||
103 | |||
104 | pub use self::adapters::*; | ||
105 | pub(crate) mod adapters { | ||
106 | use super::Iterator; | ||
107 | use crate::option::Option::{self, *}; | ||
108 | pub struct Take<I> { pub(crate) inner: I } | ||
109 | impl<I> Iterator for Take<I> where I: Iterator { | ||
110 | type Item = <I as Iterator>::Item; | ||
111 | fn next(&mut self) -> Option<<I as Iterator>::Item> { | ||
112 | None | ||
113 | } | ||
114 | } | ||
115 | } | ||
116 | } | ||
117 | |||
118 | pub mod option { | ||
119 | pub enum Option<T> { None, Some(T)} | ||
120 | } | ||
121 | |||
122 | pub mod prelude { | ||
123 | pub use crate::{convert::From, iter::{IntoIterator, Iterator}, option::Option::{self, *}, default::Default}; | ||
124 | } | ||
125 | #[prelude_import] | ||
126 | pub use prelude::*; | ||
127 | "#; | ||
128 | |||
129 | pub fn core(&self) -> Option<Crate> { | ||
130 | self.find_crate("core") | ||
131 | } | ||
132 | |||
133 | pub fn core_convert_From(&self) -> Option<Trait> { | ||
134 | self.find_trait("core:convert:From") | ||
135 | } | ||
136 | |||
137 | pub fn core_option_Option(&self) -> Option<Enum> { | ||
138 | self.find_enum("core:option:Option") | ||
139 | } | ||
140 | |||
141 | pub fn core_default_Default(&self) -> Option<Trait> { | ||
142 | self.find_trait("core:default:Default") | ||
143 | } | ||
144 | |||
145 | pub fn core_iter_Iterator(&self) -> Option<Trait> { | ||
146 | self.find_trait("core:iter:traits:iterator:Iterator") | ||
147 | } | ||
148 | |||
149 | pub fn core_iter(&self) -> Option<Module> { | ||
150 | self.find_module("core:iter") | ||
151 | } | ||
152 | |||
153 | fn find_trait(&self, path: &str) -> Option<Trait> { | ||
154 | match self.find_def(path)? { | ||
155 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it), | ||
156 | _ => None, | ||
157 | } | ||
158 | } | ||
159 | |||
160 | fn find_enum(&self, path: &str) -> Option<Enum> { | ||
161 | match self.find_def(path)? { | ||
162 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(it))) => Some(it), | ||
163 | _ => None, | ||
164 | } | ||
165 | } | ||
166 | |||
167 | fn find_module(&self, path: &str) -> Option<Module> { | ||
168 | match self.find_def(path)? { | ||
169 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(it)) => Some(it), | ||
170 | _ => None, | ||
171 | } | ||
172 | } | ||
173 | |||
174 | fn find_crate(&self, name: &str) -> Option<Crate> { | ||
175 | let krate = self.1?; | ||
176 | let db = self.0.db; | ||
177 | let res = | ||
178 | krate.dependencies(db).into_iter().find(|dep| dep.name.to_string() == name)?.krate; | ||
179 | Some(res) | ||
180 | } | ||
181 | |||
182 | fn find_def(&self, path: &str) -> Option<ScopeDef> { | ||
183 | let db = self.0.db; | ||
184 | let mut path = path.split(':'); | ||
185 | let trait_ = path.next_back()?; | ||
186 | let std_crate = path.next()?; | ||
187 | let std_crate = self.find_crate(std_crate)?; | ||
188 | let mut module = std_crate.root_module(db); | ||
189 | for segment in path { | ||
190 | module = module.children(db).find_map(|child| { | ||
191 | let name = child.name(db)?; | ||
192 | if name.to_string() == segment { | ||
193 | Some(child) | ||
194 | } else { | ||
195 | None | ||
196 | } | ||
197 | })?; | ||
198 | } | ||
199 | let def = | ||
200 | module.scope(db, None).into_iter().find(|(name, _def)| name.to_string() == trait_)?.1; | ||
201 | Some(def) | ||
202 | } | ||
203 | } | ||
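`FamousDefs::find_def` resolves its colon-separated path strings by taking the first segment as the crate, the last as the item name, and everything in between as the module chain. A tiny self-contained sketch of just that string handling (the real lookup then walks hir modules):

fn split_famous_path(path: &str) -> Option<(&str, Vec<&str>, &str)> {
    let mut parts = path.split(':');
    let item = parts.next_back()?;            // e.g. "Iterator"
    let krate = parts.next()?;                // e.g. "core"
    let modules: Vec<&str> = parts.collect(); // e.g. ["iter", "traits", "iterator"]
    Some((krate, modules, item))
}

fn main() {
    assert_eq!(
        split_famous_path("core:iter:traits:iterator:Iterator"),
        Some(("core", vec!["iter", "traits", "iterator"], "Iterator"))
    );
    assert_eq!(
        split_famous_path("core:option:Option"),
        Some(("core", vec!["option"], "Option"))
    );
}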
diff --git a/crates/assists/src/utils/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs index 423782a0e..67e800fad 100644 --- a/crates/assists/src/utils/insert_use.rs +++ b/crates/ide_db/src/helpers/insert_use.rs | |||
@@ -1,8 +1,8 @@ | |||
1 | //! Handle syntactic aspects of inserting a new `use`. | 1 | //! Handle syntactic aspects of inserting a new `use`. |
2 | use std::{cmp::Ordering, iter::successors}; | 2 | use std::{cmp::Ordering, iter::successors}; |
3 | 3 | ||
4 | use crate::RootDatabase; | ||
4 | use hir::Semantics; | 5 | use hir::Semantics; |
5 | use ide_db::RootDatabase; | ||
6 | use itertools::{EitherOrBoth, Itertools}; | 6 | use itertools::{EitherOrBoth, Itertools}; |
7 | use syntax::{ | 7 | use syntax::{ |
8 | algo::SyntaxRewriter, | 8 | algo::SyntaxRewriter, |
@@ -22,7 +22,7 @@ pub enum ImportScope { | |||
22 | } | 22 | } |
23 | 23 | ||
24 | impl ImportScope { | 24 | impl ImportScope { |
25 | pub(crate) fn from(syntax: SyntaxNode) -> Option<Self> { | 25 | pub fn from(syntax: SyntaxNode) -> Option<Self> { |
26 | if let Some(module) = ast::Module::cast(syntax.clone()) { | 26 | if let Some(module) = ast::Module::cast(syntax.clone()) { |
27 | module.item_list().map(ImportScope::Module) | 27 | module.item_list().map(ImportScope::Module) |
28 | } else if let this @ Some(_) = ast::SourceFile::cast(syntax.clone()) { | 28 | } else if let this @ Some(_) = ast::SourceFile::cast(syntax.clone()) { |
@@ -95,6 +95,7 @@ pub fn insert_use<'a>( | |||
95 | path: ast::Path, | 95 | path: ast::Path, |
96 | merge: Option<MergeBehaviour>, | 96 | merge: Option<MergeBehaviour>, |
97 | ) -> SyntaxRewriter<'a> { | 97 | ) -> SyntaxRewriter<'a> { |
98 | let _p = profile::span("insert_use"); | ||
98 | let mut rewriter = SyntaxRewriter::default(); | 99 | let mut rewriter = SyntaxRewriter::default(); |
99 | let use_item = make::use_(make::use_tree(path.clone(), None, None, false)); | 100 | let use_item = make::use_(make::use_tree(path.clone(), None, None, false)); |
100 | // merge into existing imports if possible | 101 | // merge into existing imports if possible |
@@ -179,7 +180,7 @@ fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) - | |||
179 | } | 180 | } |
180 | } | 181 | } |
181 | 182 | ||
182 | pub(crate) fn try_merge_imports( | 183 | pub fn try_merge_imports( |
183 | lhs: &ast::Use, | 184 | lhs: &ast::Use, |
184 | rhs: &ast::Use, | 185 | rhs: &ast::Use, |
185 | merge_behaviour: MergeBehaviour, | 186 | merge_behaviour: MergeBehaviour, |
@@ -194,7 +195,7 @@ pub(crate) fn try_merge_imports( | |||
194 | Some(lhs.with_use_tree(merged)) | 195 | Some(lhs.with_use_tree(merged)) |
195 | } | 196 | } |
196 | 197 | ||
197 | pub(crate) fn try_merge_trees( | 198 | pub fn try_merge_trees( |
198 | lhs: &ast::UseTree, | 199 | lhs: &ast::UseTree, |
199 | rhs: &ast::UseTree, | 200 | rhs: &ast::UseTree, |
200 | merge: MergeBehaviour, | 201 | merge: MergeBehaviour, |
diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs index 9d8ea7368..09046d3c3 100644 --- a/crates/ide_db/src/imports_locator.rs +++ b/crates/ide_db/src/imports_locator.rs | |||
@@ -36,8 +36,15 @@ pub fn find_similar_imports<'a>( | |||
36 | krate: Crate, | 36 | krate: Crate, |
37 | name_to_import: &str, | 37 | name_to_import: &str, |
38 | limit: usize, | 38 | limit: usize, |
39 | ignore_modules: bool, | ||
39 | ) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> { | 40 | ) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> { |
40 | let _p = profile::span("find_similar_imports"); | 41 | let _p = profile::span("find_similar_imports"); |
42 | |||
43 | let mut external_query = import_map::Query::new(name_to_import).limit(limit); | ||
44 | if ignore_modules { | ||
45 | external_query = external_query.exclude_import_kind(import_map::ImportKind::Module); | ||
46 | } | ||
47 | |||
41 | find_imports( | 48 | find_imports( |
42 | sema, | 49 | sema, |
43 | krate, | 50 | krate, |
@@ -46,7 +53,7 @@ pub fn find_similar_imports<'a>( | |||
46 | local_query.limit(limit); | 53 | local_query.limit(limit); |
47 | local_query | 54 | local_query |
48 | }, | 55 | }, |
49 | import_map::Query::new(name_to_import).limit(limit), | 56 | external_query, |
50 | ) | 57 | ) |
51 | } | 58 | } |
52 | 59 | ||
diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs index 38ebdbf79..fceaa089a 100644 --- a/crates/ide_db/src/lib.rs +++ b/crates/ide_db/src/lib.rs | |||
@@ -13,6 +13,7 @@ pub mod source_change; | |||
13 | pub mod ty_filter; | 13 | pub mod ty_filter; |
14 | pub mod traits; | 14 | pub mod traits; |
15 | pub mod call_info; | 15 | pub mod call_info; |
16 | pub mod helpers; | ||
16 | 17 | ||
17 | use std::{fmt, sync::Arc}; | 18 | use std::{fmt, sync::Arc}; |
18 | 19 | ||
@@ -113,7 +114,7 @@ impl RootDatabase { | |||
113 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { | 114 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { |
114 | let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP); | 115 | let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP); |
115 | base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | 116 | base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
116 | hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | 117 | hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
117 | hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | 118 | hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); |
118 | } | 119 | } |
119 | } | 120 | } |
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index f854ca09a..3ad609a00 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -12,6 +12,8 @@ mod subtree_source; | |||
12 | #[cfg(test)] | 12 | #[cfg(test)] |
13 | mod tests; | 13 | mod tests; |
14 | 14 | ||
15 | use std::fmt; | ||
16 | |||
15 | pub use tt::{Delimiter, Punct}; | 17 | pub use tt::{Delimiter, Punct}; |
16 | 18 | ||
17 | use crate::{ | 19 | use crate::{ |
@@ -33,6 +35,8 @@ pub enum ExpandError { | |||
33 | ConversionError, | 35 | ConversionError, |
34 | InvalidRepeat, | 36 | InvalidRepeat, |
35 | ProcMacroError(tt::ExpansionError), | 37 | ProcMacroError(tt::ExpansionError), |
38 | UnresolvedProcMacro, | ||
39 | Other(String), | ||
36 | } | 40 | } |
37 | 41 | ||
38 | impl From<tt::ExpansionError> for ExpandError { | 42 | impl From<tt::ExpansionError> for ExpandError { |
@@ -41,6 +45,21 @@ impl From<tt::ExpansionError> for ExpandError { | |||
41 | } | 45 | } |
42 | } | 46 | } |
43 | 47 | ||
48 | impl fmt::Display for ExpandError { | ||
49 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
50 | match self { | ||
51 | ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"), | ||
52 | ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), | ||
53 | ExpandError::BindingError(e) => f.write_str(e), | ||
54 | ExpandError::ConversionError => f.write_str("could not convert tokens"), | ||
55 | ExpandError::InvalidRepeat => f.write_str("invalid repeat expression"), | ||
56 | ExpandError::ProcMacroError(e) => e.fmt(f), | ||
57 | ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc macro"), | ||
58 | ExpandError::Other(e) => f.write_str(e), | ||
59 | } | ||
60 | } | ||
61 | } | ||
62 | |||
44 | pub use crate::syntax_bridge::{ | 63 | pub use crate::syntax_bridge::{ |
45 | ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, | 64 | ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, |
46 | TokenMap, | 65 | TokenMap, |
@@ -246,33 +265,42 @@ fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> { | |||
246 | Ok(()) | 265 | Ok(()) |
247 | } | 266 | } |
248 | 267 | ||
249 | #[derive(Debug)] | 268 | #[derive(Debug, Clone, Eq, PartialEq)] |
250 | pub struct ExpandResult<T>(pub T, pub Option<ExpandError>); | 269 | pub struct ExpandResult<T> { |
270 | pub value: T, | ||
271 | pub err: Option<ExpandError>, | ||
272 | } | ||
251 | 273 | ||
252 | impl<T> ExpandResult<T> { | 274 | impl<T> ExpandResult<T> { |
253 | pub fn ok(t: T) -> ExpandResult<T> { | 275 | pub fn ok(value: T) -> Self { |
254 | ExpandResult(t, None) | 276 | Self { value, err: None } |
277 | } | ||
278 | |||
279 | pub fn only_err(err: ExpandError) -> Self | ||
280 | where | ||
281 | T: Default, | ||
282 | { | ||
283 | Self { value: Default::default(), err: Some(err) } | ||
255 | } | 284 | } |
256 | 285 | ||
257 | pub fn only_err(err: ExpandError) -> ExpandResult<T> | 286 | pub fn str_err(err: String) -> Self |
258 | where | 287 | where |
259 | T: Default, | 288 | T: Default, |
260 | { | 289 | { |
261 | ExpandResult(Default::default(), Some(err)) | 290 | Self::only_err(ExpandError::Other(err)) |
262 | } | 291 | } |
263 | 292 | ||
264 | pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> { | 293 | pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> { |
265 | ExpandResult(f(self.0), self.1) | 294 | ExpandResult { value: f(self.value), err: self.err } |
266 | } | 295 | } |
267 | 296 | ||
268 | pub fn result(self) -> Result<T, ExpandError> { | 297 | pub fn result(self) -> Result<T, ExpandError> { |
269 | self.1.map(Err).unwrap_or(Ok(self.0)) | 298 | self.err.map(Err).unwrap_or(Ok(self.value)) |
270 | } | 299 | } |
271 | } | 300 | } |
272 | 301 | ||
273 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { | 302 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { |
274 | fn from(result: Result<T, ExpandError>) -> ExpandResult<T> { | 303 | fn from(result: Result<T, ExpandError>) -> Self { |
275 | result | 304 | result.map_or_else(|e| Self::only_err(e), |it| Self::ok(it)) |
276 | .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None)) | ||
277 | } | 305 | } |
278 | } | 306 | } |
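`ExpandResult` changes from a positional tuple struct to named `value`/`err` fields, and the remaining hunks only migrate call sites. A self-contained copy of the new shape with a trimmed-down `ExpandError`, showing how construction, `map`, and `result()` read after the change:

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum ExpandError {
    NoMatchingRule,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ExpandResult<T> {
    pub value: T,
    pub err: Option<ExpandError>,
}

impl<T> ExpandResult<T> {
    pub fn ok(value: T) -> Self {
        Self { value, err: None }
    }
    pub fn only_err(err: ExpandError) -> Self
    where
        T: Default,
    {
        Self { value: Default::default(), err: Some(err) }
    }
    pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
        ExpandResult { value: f(self.value), err: self.err }
    }
    pub fn result(self) -> Result<T, ExpandError> {
        self.err.map(Err).unwrap_or(Ok(self.value))
    }
}

fn main() {
    // Before: `let ExpandResult(tt, e) = ...;`
    // After: destructure by field name, which keeps call sites readable.
    let ExpandResult { value, err } = ExpandResult::ok(42).map(|n| n * 2);
    assert_eq!(value, 84);
    assert!(err.is_none());

    let failed: ExpandResult<i32> = ExpandResult::only_err(ExpandError::NoMatchingRule);
    assert_eq!(failed.result(), Err(ExpandError::NoMatchingRule));
}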
diff --git a/crates/mbe/src/mbe_expander.rs b/crates/mbe/src/mbe_expander.rs index 1ad8b9f8a..97bce0536 100644 --- a/crates/mbe/src/mbe_expander.rs +++ b/crates/mbe/src/mbe_expander.rs | |||
@@ -28,10 +28,10 @@ fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt:: | |||
28 | // If we find a rule that applies without errors, we're done. | 28 | // If we find a rule that applies without errors, we're done. |
29 | // Unconditionally returning the transcription here makes the | 29 | // Unconditionally returning the transcription here makes the |
30 | // `test_repeat_bad_var` test fail. | 30 | // `test_repeat_bad_var` test fail. |
31 | let ExpandResult(res, transcribe_err) = | 31 | let ExpandResult { value, err: transcribe_err } = |
32 | transcriber::transcribe(&rule.rhs, &new_match.bindings); | 32 | transcriber::transcribe(&rule.rhs, &new_match.bindings); |
33 | if transcribe_err.is_none() { | 33 | if transcribe_err.is_none() { |
34 | return ExpandResult::ok(res); | 34 | return ExpandResult::ok(value); |
35 | } | 35 | } |
36 | } | 36 | } |
37 | // Use the rule if we matched more tokens, or had fewer errors | 37 | // Use the rule if we matched more tokens, or had fewer errors |
@@ -47,11 +47,11 @@ fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt:: | |||
47 | } | 47 | } |
48 | if let Some((match_, rule)) = match_ { | 48 | if let Some((match_, rule)) = match_ { |
49 | // if we got here, there was no match without errors | 49 | // if we got here, there was no match without errors |
50 | let ExpandResult(result, transcribe_err) = | 50 | let ExpandResult { value, err: transcribe_err } = |
51 | transcriber::transcribe(&rule.rhs, &match_.bindings); | 51 | transcriber::transcribe(&rule.rhs, &match_.bindings); |
52 | ExpandResult(result, match_.err.or(transcribe_err)) | 52 | ExpandResult { value, err: match_.err.or(transcribe_err) } |
53 | } else { | 53 | } else { |
54 | ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule)) | 54 | ExpandResult::only_err(ExpandError::NoMatchingRule) |
55 | } | 55 | } |
56 | } | 56 | } |
57 | 57 | ||
@@ -143,7 +143,10 @@ mod tests { | |||
143 | } | 143 | } |
144 | 144 | ||
145 | fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { | 145 | fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) { |
146 | assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err)); | 146 | assert_eq!( |
147 | expand_first(&create_rules(&format_macro(macro_body)), invocation).err, | ||
148 | Some(err) | ||
149 | ); | ||
147 | } | 150 | } |
148 | 151 | ||
149 | fn format_macro(macro_body: &str) -> String { | 152 | fn format_macro(macro_body: &str) -> String { |
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs index 39a8eefbd..3f8445897 100644 --- a/crates/mbe/src/mbe_expander/matcher.rs +++ b/crates/mbe/src/mbe_expander/matcher.rs | |||
@@ -158,7 +158,8 @@ fn match_subtree( | |||
158 | continue; | 158 | continue; |
159 | } | 159 | } |
160 | }; | 160 | }; |
161 | let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src); | 161 | let ExpandResult { value: matched, err: match_err } = |
162 | match_meta_var(kind.as_str(), src); | ||
162 | match matched { | 163 | match matched { |
163 | Some(fragment) => { | 164 | Some(fragment) => { |
164 | res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); | 165 | res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); |
@@ -342,17 +343,17 @@ impl<'a> TtIter<'a> { | |||
342 | token_trees: res.into_iter().cloned().collect(), | 343 | token_trees: res.into_iter().cloned().collect(), |
343 | })), | 344 | })), |
344 | }; | 345 | }; |
345 | ExpandResult(res, err) | 346 | ExpandResult { value: res, err } |
346 | } | 347 | } |
347 | 348 | ||
348 | pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> { | 349 | pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> { |
349 | let mut fork = self.clone(); | 350 | let mut fork = self.clone(); |
350 | match fork.expect_fragment(Visibility) { | 351 | match fork.expect_fragment(Visibility) { |
351 | ExpandResult(tt, None) => { | 352 | ExpandResult { value: tt, err: None } => { |
352 | *self = fork; | 353 | *self = fork; |
353 | tt | 354 | tt |
354 | } | 355 | } |
355 | ExpandResult(_, Some(_)) => None, | 356 | ExpandResult { value: _, err: Some(_) } => None, |
356 | } | 357 | } |
357 | } | 358 | } |
358 | } | 359 | } |
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs index c9525c5bf..616119ba9 100644 --- a/crates/mbe/src/mbe_expander/transcriber.rs +++ b/crates/mbe/src/mbe_expander/transcriber.rs | |||
@@ -93,17 +93,18 @@ fn expand_subtree( | |||
93 | match op { | 93 | match op { |
94 | Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), | 94 | Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), |
95 | Op::TokenTree(tt::TokenTree::Subtree(tt)) => { | 95 | Op::TokenTree(tt::TokenTree::Subtree(tt)) => { |
96 | let ExpandResult(tt, e) = expand_subtree(ctx, tt, arena); | 96 | let ExpandResult { value: tt, err: e } = expand_subtree(ctx, tt, arena); |
97 | err = err.or(e); | 97 | err = err.or(e); |
98 | arena.push(tt.into()); | 98 | arena.push(tt.into()); |
99 | } | 99 | } |
100 | Op::Var { name, kind: _ } => { | 100 | Op::Var { name, kind: _ } => { |
101 | let ExpandResult(fragment, e) = expand_var(ctx, name); | 101 | let ExpandResult { value: fragment, err: e } = expand_var(ctx, name); |
102 | err = err.or(e); | 102 | err = err.or(e); |
103 | push_fragment(arena, fragment); | 103 | push_fragment(arena, fragment); |
104 | } | 104 | } |
105 | Op::Repeat { subtree, kind, separator } => { | 105 | Op::Repeat { subtree, kind, separator } => { |
106 | let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator, arena); | 106 | let ExpandResult { value: fragment, err: e } = |
107 | expand_repeat(ctx, subtree, kind, separator, arena); | ||
107 | err = err.or(e); | 108 | err = err.or(e); |
108 | push_fragment(arena, fragment) | 109 | push_fragment(arena, fragment) |
109 | } | 110 | } |
@@ -111,7 +112,7 @@ fn expand_subtree( | |||
111 | } | 112 | } |
112 | // drain the elements added in this instance of expand_subtree | 113 | // drain the elements added in this instance of expand_subtree |
113 | let tts = arena.drain(start_elements..arena.len()).collect(); | 114 | let tts = arena.drain(start_elements..arena.len()).collect(); |
114 | ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err) | 115 | ExpandResult { value: tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err } |
115 | } | 116 | } |
116 | 117 | ||
117 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { | 118 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { |
@@ -152,7 +153,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { | |||
152 | ExpandResult::ok(Fragment::Tokens(tt)) | 153 | ExpandResult::ok(Fragment::Tokens(tt)) |
153 | } else { | 154 | } else { |
154 | ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( | 155 | ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( |
155 | |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)), | 156 | |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, |
156 | |b| ExpandResult::ok(b.clone()), | 157 | |b| ExpandResult::ok(b.clone()), |
157 | ) | 158 | ) |
158 | } | 159 | } |
@@ -174,7 +175,7 @@ fn expand_repeat( | |||
174 | let mut counter = 0; | 175 | let mut counter = 0; |
175 | 176 | ||
176 | loop { | 177 | loop { |
177 | let ExpandResult(mut t, e) = expand_subtree(ctx, template, arena); | 178 | let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, arena); |
178 | let nesting_state = ctx.nesting.last_mut().unwrap(); | 179 | let nesting_state = ctx.nesting.last_mut().unwrap(); |
179 | if nesting_state.at_end || !nesting_state.hit { | 180 | if nesting_state.at_end || !nesting_state.hit { |
180 | break; | 181 | break; |
@@ -234,7 +235,10 @@ fn expand_repeat( | |||
234 | let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); | 235 | let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); |
235 | 236 | ||
236 | if RepeatKind::OneOrMore == kind && counter == 0 { | 237 | if RepeatKind::OneOrMore == kind && counter == 0 { |
237 | return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken)); | 238 | return ExpandResult { |
239 | value: Fragment::Tokens(tt), | ||
240 | err: Some(ExpandError::UnexpectedToken), | ||
241 | }; | ||
238 | } | 242 | } |
239 | ExpandResult::ok(Fragment::Tokens(tt)) | 243 | ExpandResult::ok(Fragment::Tokens(tt)) |
240 | } | 244 | } |
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs index 780bc470a..ad29b82f7 100644 --- a/crates/parser/src/grammar/items.rs +++ b/crates/parser/src/grammar/items.rs | |||
@@ -112,7 +112,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
112 | has_mods = true; | 112 | has_mods = true; |
113 | } | 113 | } |
114 | 114 | ||
115 | if p.at(T![extern]) { | 115 | if p.at(T![extern]) && p.nth(1) != T!['{'] && (p.nth(1) != STRING || p.nth(2) != T!['{']) { |
116 | has_mods = true; | 116 | has_mods = true; |
117 | abi(p); | 117 | abi(p); |
118 | } | 118 | } |
@@ -181,6 +181,14 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
181 | T![type] => { | 181 | T![type] => { |
182 | type_alias(p, m); | 182 | type_alias(p, m); |
183 | } | 183 | } |
184 | |||
185 | // unsafe extern "C" {} | ||
186 | T![extern] => { | ||
187 | abi(p); | ||
188 | extern_item_list(p); | ||
189 | m.complete(p, EXTERN_BLOCK); | ||
190 | } | ||
191 | |||
184 | _ => { | 192 | _ => { |
185 | if !has_visibility && !has_mods { | 193 | if !has_visibility && !has_mods { |
186 | return Err(m); | 194 | return Err(m); |
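The reworked `extern` handling above needs lookahead because, once modifiers like `unsafe` have been consumed, an `extern` token can begin either an ABI qualifier on a function or an extern block, and only the tokens after it disambiguate. A small illustrative snippet of the forms involved (ordinary Rust source, not parser code):

// What the `p.nth(1)` / `p.nth(2)` checks are telling apart:
extern "C" {}                       // extern block: `extern`, ABI string, then `{`
unsafe extern "C" fn callback() {}  // `extern "C"` qualifies the fn; no `{` follows the ABI string
// The new `T![extern]` branch additionally completes `unsafe extern "C++" {}` as an
// EXTERN_BLOCK, which the updated 0068_item_modifiers test data further down exercises.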
diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml index 048b32186..729372968 100644 --- a/crates/proc_macro_srv/Cargo.toml +++ b/crates/proc_macro_srv/Cargo.toml | |||
@@ -20,7 +20,7 @@ proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" } | |||
20 | test_utils = { path = "../test_utils", version = "0.0.0" } | 20 | test_utils = { path = "../test_utils", version = "0.0.0" } |
21 | 21 | ||
22 | [dev-dependencies] | 22 | [dev-dependencies] |
23 | cargo_metadata = "0.12.0" | 23 | cargo_metadata = "=0.12.0" |
24 | difference = "2.0.0" | 24 | difference = "2.0.0" |
25 | 25 | ||
26 | # used as proc macro test targets | 26 | # used as proc macro test targets |
diff --git a/crates/project_model/Cargo.toml b/crates/project_model/Cargo.toml index 2d53bcbcc..e0c591603 100644 --- a/crates/project_model/Cargo.toml +++ b/crates/project_model/Cargo.toml | |||
@@ -12,7 +12,7 @@ doctest = false | |||
12 | [dependencies] | 12 | [dependencies] |
13 | log = "0.4.8" | 13 | log = "0.4.8" |
14 | rustc-hash = "1.1.0" | 14 | rustc-hash = "1.1.0" |
15 | cargo_metadata = "0.12.0" | 15 | cargo_metadata = "=0.12.0" |
16 | serde = { version = "1.0.106", features = ["derive"] } | 16 | serde = { version = "1.0.106", features = ["derive"] } |
17 | serde_json = "1.0.48" | 17 | serde_json = "1.0.48" |
18 | anyhow = "1.0.26" | 18 | anyhow = "1.0.26" |
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 56c51486f..08559b53a 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -21,7 +21,7 @@ env_logger = { version = "0.8.1", default-features = false } | |||
21 | itertools = "0.9.0" | 21 | itertools = "0.9.0" |
22 | jod-thread = "0.1.0" | 22 | jod-thread = "0.1.0" |
23 | log = "0.4.8" | 23 | log = "0.4.8" |
24 | lsp-types = { version = "0.83.1", features = ["proposed"] } | 24 | lsp-types = { version = "0.84.0", features = ["proposed"] } |
25 | parking_lot = "0.11.0" | 25 | parking_lot = "0.11.0" |
26 | pico-args = "0.3.1" | 26 | pico-args = "0.3.1" |
27 | oorandom = "11.1.2" | 27 | oorandom = "11.1.2" |
@@ -39,6 +39,7 @@ tracing-tree = { version = "0.1.4" } | |||
39 | stdx = { path = "../stdx", version = "0.0.0" } | 39 | stdx = { path = "../stdx", version = "0.0.0" } |
40 | flycheck = { path = "../flycheck", version = "0.0.0" } | 40 | flycheck = { path = "../flycheck", version = "0.0.0" } |
41 | ide = { path = "../ide", version = "0.0.0" } | 41 | ide = { path = "../ide", version = "0.0.0" } |
42 | ide_db = { path = "../ide_db", version = "0.0.0" } | ||
42 | profile = { path = "../profile", version = "0.0.0" } | 43 | profile = { path = "../profile", version = "0.0.0" } |
43 | project_model = { path = "../project_model", version = "0.0.0" } | 44 | project_model = { path = "../project_model", version = "0.0.0" } |
44 | syntax = { path = "../syntax", version = "0.0.0" } | 45 | syntax = { path = "../syntax", version = "0.0.0" } |
@@ -49,7 +50,6 @@ cfg = { path = "../cfg", version = "0.0.0" } | |||
49 | toolchain = { path = "../toolchain", version = "0.0.0" } | 50 | toolchain = { path = "../toolchain", version = "0.0.0" } |
50 | 51 | ||
51 | # This should only be used in CLI | 52 | # This should only be used in CLI |
52 | ide_db = { path = "../ide_db", version = "0.0.0" } | ||
53 | ssr = { path = "../ssr", version = "0.0.0" } | 53 | ssr = { path = "../ssr", version = "0.0.0" } |
54 | hir = { path = "../hir", version = "0.0.0" } | 54 | hir = { path = "../hir", version = "0.0.0" } |
55 | hir_def = { path = "../hir_def", version = "0.0.0" } | 55 | hir_def = { path = "../hir_def", version = "0.0.0" } |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 5fc6800cf..59269a74b 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -11,10 +11,8 @@ use std::{convert::TryFrom, ffi::OsString, path::PathBuf}; | |||
11 | 11 | ||
12 | use flycheck::FlycheckConfig; | 12 | use flycheck::FlycheckConfig; |
13 | use hir::PrefixKind; | 13 | use hir::PrefixKind; |
14 | use ide::{ | 14 | use ide::{AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig}; |
15 | AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig, | 15 | use ide_db::helpers::insert_use::MergeBehaviour; |
16 | MergeBehaviour, | ||
17 | }; | ||
18 | use lsp_types::{ClientCapabilities, MarkupKind}; | 16 | use lsp_types::{ClientCapabilities, MarkupKind}; |
19 | use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; | 17 | use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; |
20 | use rustc_hash::FxHashSet; | 18 | use rustc_hash::FxHashSet; |
@@ -184,6 +182,7 @@ impl Config { | |||
184 | }, | 182 | }, |
185 | completion: CompletionConfig { | 183 | completion: CompletionConfig { |
186 | enable_postfix_completions: true, | 184 | enable_postfix_completions: true, |
185 | enable_experimental_completions: true, | ||
187 | add_call_parenthesis: true, | 186 | add_call_parenthesis: true, |
188 | add_call_argument_snippets: true, | 187 | add_call_argument_snippets: true, |
189 | ..CompletionConfig::default() | 188 | ..CompletionConfig::default() |
@@ -306,6 +305,7 @@ impl Config { | |||
306 | }; | 305 | }; |
307 | 306 | ||
308 | self.completion.enable_postfix_completions = data.completion_postfix_enable; | 307 | self.completion.enable_postfix_completions = data.completion_postfix_enable; |
308 | self.completion.enable_experimental_completions = data.completion_enableExperimental; | ||
309 | self.completion.add_call_parenthesis = data.completion_addCallParenthesis; | 309 | self.completion.add_call_parenthesis = data.completion_addCallParenthesis; |
310 | self.completion.add_call_argument_snippets = data.completion_addCallArgumentSnippets; | 310 | self.completion.add_call_argument_snippets = data.completion_addCallArgumentSnippets; |
311 | self.completion.merge = self.assist.insert_use.merge; | 311 | self.completion.merge = self.assist.insert_use.merge; |
@@ -480,7 +480,7 @@ macro_rules! config_data { | |||
480 | 480 | ||
481 | config_data! { | 481 | config_data! { |
482 | struct ConfigData { | 482 | struct ConfigData { |
483 | assist_importMergeBehaviour: MergeBehaviourDef = MergeBehaviourDef::None, | 483 | assist_importMergeBehaviour: MergeBehaviourDef = MergeBehaviourDef::Full, |
484 | assist_importPrefix: ImportPrefixDef = ImportPrefixDef::Plain, | 484 | assist_importPrefix: ImportPrefixDef = ImportPrefixDef::Plain, |
485 | 485 | ||
486 | callInfo_full: bool = true, | 486 | callInfo_full: bool = true, |
@@ -506,6 +506,7 @@ config_data! { | |||
506 | completion_addCallArgumentSnippets: bool = true, | 506 | completion_addCallArgumentSnippets: bool = true, |
507 | completion_addCallParenthesis: bool = true, | 507 | completion_addCallParenthesis: bool = true, |
508 | completion_postfix_enable: bool = true, | 508 | completion_postfix_enable: bool = true, |
509 | completion_enableExperimental: bool = true, | ||
509 | 510 | ||
510 | diagnostics_enable: bool = true, | 511 | diagnostics_enable: bool = true, |
511 | diagnostics_enableExperimental: bool = true, | 512 | diagnostics_enableExperimental: bool = true, |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 6ea08adce..f349b0810 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! The main loop of `rust-analyzer` responsible for dispatching LSP | 1 | //! The main loop of `rust-analyzer` responsible for dispatching LSP |
2 | //! requests/replies and notifications back to the client. | 2 | //! requests/replies and notifications back to the client. |
3 | use std::{ | 3 | use std::{ |
4 | env, fmt, panic, | 4 | env, fmt, |
5 | time::{Duration, Instant}, | 5 | time::{Duration, Instant}, |
6 | }; | 6 | }; |
7 | 7 | ||
@@ -289,55 +289,69 @@ impl GlobalState { | |||
289 | } | 289 | } |
290 | } | 290 | } |
291 | } | 291 | } |
292 | Event::Flycheck(task) => match task { | 292 | Event::Flycheck(mut task) => { |
293 | flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => { | 293 | let _p = profile::span("GlobalState::handle_event/flycheck"); |
294 | let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( | 294 | loop { |
295 | &self.config.diagnostics_map, | 295 | match task { |
296 | &diagnostic, | 296 | flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => { |
297 | &workspace_root, | 297 | let diagnostics = |
298 | ); | 298 | crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( |
299 | for diag in diagnostics { | 299 | &self.config.diagnostics_map, |
300 | match url_to_file_id(&self.vfs.read().0, &diag.url) { | 300 | &diagnostic, |
301 | Ok(file_id) => self.diagnostics.add_check_diagnostic( | 301 | &workspace_root, |
302 | file_id, | 302 | ); |
303 | diag.diagnostic, | 303 | for diag in diagnostics { |
304 | diag.fixes, | 304 | match url_to_file_id(&self.vfs.read().0, &diag.url) { |
305 | ), | 305 | Ok(file_id) => self.diagnostics.add_check_diagnostic( |
306 | Err(err) => { | 306 | file_id, |
307 | log::error!("File with cargo diagnostic not found in VFS: {}", err); | 307 | diag.diagnostic, |
308 | } | 308 | diag.fixes, |
309 | }; | 309 | ), |
310 | } | 310 | Err(err) => { |
311 | } | 311 | log::error!( |
312 | 312 | "File with cargo diagnostic not found in VFS: {}", | |
313 | flycheck::Message::Progress { id, progress } => { | 313 | err |
314 | let (state, message) = match progress { | 314 | ); |
315 | flycheck::Progress::DidStart => { | 315 | } |
316 | self.diagnostics.clear_check(); | 316 | }; |
317 | (Progress::Begin, None) | ||
318 | } | ||
319 | flycheck::Progress::DidCheckCrate(target) => { | ||
320 | (Progress::Report, Some(target)) | ||
321 | } | ||
322 | flycheck::Progress::DidCancel => (Progress::End, None), | ||
323 | flycheck::Progress::DidFinish(result) => { | ||
324 | if let Err(err) = result { | ||
325 | log::error!("cargo check failed: {}", err) | ||
326 | } | 317 | } |
327 | (Progress::End, None) | ||
328 | } | 318 | } |
329 | }; | ||
330 | 319 | ||
331 | // When we're running multiple flychecks, we have to include a disambiguator in | 320 | flycheck::Message::Progress { id, progress } => { |
332 | // the title, or the editor complains. Note that this is a user-facing string. | 321 | let (state, message) = match progress { |
333 | let title = if self.flycheck.len() == 1 { | 322 | flycheck::Progress::DidStart => { |
334 | "cargo check".to_string() | 323 | self.diagnostics.clear_check(); |
335 | } else { | 324 | (Progress::Begin, None) |
336 | format!("cargo check (#{})", id + 1) | 325 | } |
337 | }; | 326 | flycheck::Progress::DidCheckCrate(target) => { |
338 | self.report_progress(&title, state, message, None); | 327 | (Progress::Report, Some(target)) |
328 | } | ||
329 | flycheck::Progress::DidCancel => (Progress::End, None), | ||
330 | flycheck::Progress::DidFinish(result) => { | ||
331 | if let Err(err) = result { | ||
332 | log::error!("cargo check failed: {}", err) | ||
333 | } | ||
334 | (Progress::End, None) | ||
335 | } | ||
336 | }; | ||
337 | |||
338 | // When we're running multiple flychecks, we have to include a disambiguator in | ||
339 | // the title, or the editor complains. Note that this is a user-facing string. | ||
340 | let title = if self.flycheck.len() == 1 { | ||
341 | "cargo check".to_string() | ||
342 | } else { | ||
343 | format!("cargo check (#{})", id + 1) | ||
344 | }; | ||
345 | self.report_progress(&title, state, message, None); | ||
346 | } | ||
347 | } | ||
348 | // Coalesce many flycheck updates into a single loop turn | ||
349 | task = match self.flycheck_receiver.try_recv() { | ||
350 | Ok(task) => task, | ||
351 | Err(_) => break, | ||
352 | } | ||
339 | } | 353 | } |
340 | }, | 354 | } |
341 | } | 355 | } |
342 | 356 | ||
343 | let state_changed = self.process_changes(); | 357 | let state_changed = self.process_changes(); |
@@ -348,13 +362,7 @@ impl GlobalState { | |||
348 | } | 362 | } |
349 | 363 | ||
350 | if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) { | 364 | if self.status == Status::Ready && (state_changed || prev_status == Status::Loading) { |
351 | let subscriptions = self | 365 | self.update_file_notifications_on_threadpool(); |
352 | .mem_docs | ||
353 | .keys() | ||
354 | .map(|path| self.vfs.read().0.file_id(&path).unwrap()) | ||
355 | .collect::<Vec<_>>(); | ||
356 | |||
357 | self.update_file_notifications_on_threadpool(subscriptions); | ||
358 | 366 | ||
359 | // Refresh semantic tokens if the client supports it. | 367 | // Refresh semantic tokens if the client supports it. |
360 | if self.config.semantic_tokens_refresh { | 368 | if self.config.semantic_tokens_refresh { |
@@ -498,12 +506,19 @@ impl GlobalState { | |||
498 | .write() | 506 | .write() |
499 | .0 | 507 | .0 |
500 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); | 508 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); |
509 | this.update_file_notifications_on_threadpool(); | ||
501 | } | 510 | } |
502 | Ok(()) | 511 | Ok(()) |
503 | })? | 512 | })? |
504 | .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| { | 513 | .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| { |
505 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { | 514 | if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { |
506 | let doc = this.mem_docs.get_mut(&path).unwrap(); | 515 | let doc = match this.mem_docs.get_mut(&path) { |
516 | Some(doc) => doc, | ||
517 | None => { | ||
518 | log::error!("expected DidChangeTextDocument: {}", path); | ||
519 | return Ok(()); | ||
520 | } | ||
521 | }; | ||
507 | let vfs = &mut this.vfs.write().0; | 522 | let vfs = &mut this.vfs.write().0; |
508 | let file_id = vfs.file_id(&path).unwrap(); | 523 | let file_id = vfs.file_id(&path).unwrap(); |
509 | let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap(); | 524 | let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap(); |
@@ -600,7 +615,13 @@ impl GlobalState { | |||
600 | .finish(); | 615 | .finish(); |
601 | Ok(()) | 616 | Ok(()) |
602 | } | 617 | } |
603 | fn update_file_notifications_on_threadpool(&mut self, subscriptions: Vec<FileId>) { | 618 | fn update_file_notifications_on_threadpool(&mut self) { |
619 | let subscriptions = self | ||
620 | .mem_docs | ||
621 | .keys() | ||
622 | .map(|path| self.vfs.read().0.file_id(&path).unwrap()) | ||
623 | .collect::<Vec<_>>(); | ||
624 | |||
604 | log::trace!("updating notifications for {:?}", subscriptions); | 625 | log::trace!("updating notifications for {:?}", subscriptions); |
605 | if self.config.publish_diagnostics { | 626 | if self.config.publish_diagnostics { |
606 | let snapshot = self.snapshot(); | 627 | let snapshot = self.snapshot(); |
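Two behavioral changes in the main loop above are worth calling out: flycheck messages are now drained within a single loop turn (the inner `loop` that ends in `self.flycheck_receiver.try_recv()`), and `update_file_notifications_on_threadpool` now computes its own subscription set. A minimal sketch of the coalescing idea, using `std::sync::mpsc` rather than rust-analyzer's actual channel and message types:

use std::sync::mpsc::Receiver;

// Handle the message we were woken up for, then keep pulling whatever is
// already queued; only return to the outer select once the queue is empty.
fn handle_coalesced<T>(first: T, receiver: &Receiver<T>, mut handle: impl FnMut(T)) {
    let mut task = first;
    loop {
        handle(task);
        task = match receiver.try_recv() {
            Ok(next) => next,
            Err(_) => break,
        };
    }
}

In the diff this pattern runs inside the `Event::Flycheck` arm, so a burst of diagnostics from `cargo check` triggers one round of follow-up work instead of one per message.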
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 001bf5949..b2d35f535 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs | |||
@@ -205,7 +205,7 @@ impl GlobalState { | |||
205 | } | 205 | } |
206 | let res = vfs.file_id(&vfs_path); | 206 | let res = vfs.file_id(&vfs_path); |
207 | if res.is_none() { | 207 | if res.is_none() { |
208 | log::error!("failed to load {}", path.display()) | 208 | log::warn!("failed to load {}", path.display()) |
209 | } | 209 | } |
210 | res | 210 | res |
211 | }; | 211 | }; |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 2f35425bb..2052b800c 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -629,12 +629,21 @@ pub(crate) fn resource_op( | |||
629 | match file_system_edit { | 629 | match file_system_edit { |
630 | FileSystemEdit::CreateFile { anchor, dst } => { | 630 | FileSystemEdit::CreateFile { anchor, dst } => { |
631 | let uri = snap.anchored_path(anchor, &dst); | 631 | let uri = snap.anchored_path(anchor, &dst); |
632 | lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None }) | 632 | lsp_types::ResourceOp::Create(lsp_types::CreateFile { |
633 | uri, | ||
634 | options: None, | ||
635 | annotation: None, | ||
636 | }) | ||
633 | } | 637 | } |
634 | FileSystemEdit::MoveFile { src, anchor, dst } => { | 638 | FileSystemEdit::MoveFile { src, anchor, dst } => { |
635 | let old_uri = snap.file_id_to_url(src); | 639 | let old_uri = snap.file_id_to_url(src); |
636 | let new_uri = snap.anchored_path(anchor, &dst); | 640 | let new_uri = snap.anchored_path(anchor, &dst); |
637 | lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None }) | 641 | lsp_types::ResourceOp::Rename(lsp_types::RenameFile { |
642 | old_uri, | ||
643 | new_uri, | ||
644 | options: None, | ||
645 | annotation: None, | ||
646 | }) | ||
638 | } | 647 | } |
639 | } | 648 | } |
640 | } | 649 | } |
@@ -684,9 +693,11 @@ impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit { | |||
684 | edits: edit | 693 | edits: edit |
685 | .edits | 694 | .edits |
686 | .into_iter() | 695 | .into_iter() |
687 | .map(|edit| lsp_types::TextEdit { | 696 | .map(|edit| { |
688 | range: edit.range, | 697 | lsp_types::OneOf::Left(lsp_types::TextEdit { |
689 | new_text: edit.new_text, | 698 | range: edit.range, |
699 | new_text: edit.new_text, | ||
700 | }) | ||
690 | }) | 701 | }) |
691 | .collect(), | 702 | .collect(), |
692 | }, | 703 | }, |
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 1fe907753..ce62babc3 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml | |||
@@ -28,6 +28,7 @@ stdx = { path = "../stdx", version = "0.0.0" } | |||
28 | text_edit = { path = "../text_edit", version = "0.0.0" } | 28 | text_edit = { path = "../text_edit", version = "0.0.0" } |
29 | parser = { path = "../parser", version = "0.0.0" } | 29 | parser = { path = "../parser", version = "0.0.0" } |
30 | test_utils = { path = "../test_utils", version = "0.0.0" } | 30 | test_utils = { path = "../test_utils", version = "0.0.0" } |
31 | profile = { path = "../profile", version = "0.0.0" } | ||
31 | 32 | ||
32 | [dev-dependencies] | 33 | [dev-dependencies] |
33 | walkdir = "2.3.1" | 34 | walkdir = "2.3.1" |
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs index 320c430c9..ee89d9867 100644 --- a/crates/syntax/src/algo.rs +++ b/crates/syntax/src/algo.rs | |||
@@ -127,6 +127,8 @@ pub struct TreeDiff { | |||
127 | 127 | ||
128 | impl TreeDiff { | 128 | impl TreeDiff { |
129 | pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { | 129 | pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { |
130 | let _p = profile::span("into_text_edit"); | ||
131 | |||
130 | for (anchor, to) in self.insertions.iter() { | 132 | for (anchor, to) in self.insertions.iter() { |
131 | let offset = match anchor { | 133 | let offset = match anchor { |
132 | TreeDiffInsertPos::After(it) => it.text_range().end(), | 134 | TreeDiffInsertPos::After(it) => it.text_range().end(), |
@@ -154,6 +156,8 @@ impl TreeDiff { | |||
154 | /// | 156 | /// |
155 | /// This function tries to find a fine-grained diff. | 157 | /// This function tries to find a fine-grained diff. |
156 | pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { | 158 | pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { |
159 | let _p = profile::span("diff"); | ||
160 | |||
157 | let mut diff = TreeDiff { | 161 | let mut diff = TreeDiff { |
158 | replacements: FxHashMap::default(), | 162 | replacements: FxHashMap::default(), |
159 | insertions: FxIndexMap::default(), | 163 | insertions: FxIndexMap::default(), |
@@ -467,6 +471,8 @@ impl<'a> SyntaxRewriter<'a> { | |||
467 | } | 471 | } |
468 | 472 | ||
469 | pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { | 473 | pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { |
474 | let _p = profile::span("rewrite"); | ||
475 | |||
470 | if self.f.is_none() && self.replacements.is_empty() && self.insertions.is_empty() { | 476 | if self.f.is_none() && self.replacements.is_empty() && self.insertions.is_empty() { |
471 | return node.clone(); | 477 | return node.clone(); |
472 | } | 478 | } |
@@ -483,6 +489,7 @@ impl<'a> SyntaxRewriter<'a> { | |||
483 | /// | 489 | /// |
484 | /// Returns `None` when there are no replacements. | 490 | /// Returns `None` when there are no replacements. |
485 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { | 491 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { |
492 | let _p = profile::span("rewrite_root"); | ||
486 | fn element_to_node_or_parent(element: &SyntaxElement) -> SyntaxNode { | 493 | fn element_to_node_or_parent(element: &SyntaxElement) -> SyntaxNode { |
487 | match element { | 494 | match element { |
488 | SyntaxElement::Node(it) => it.clone(), | 495 | SyntaxElement::Node(it) => it.clone(), |
@@ -517,6 +524,8 @@ impl<'a> SyntaxRewriter<'a> { | |||
517 | } | 524 | } |
518 | 525 | ||
519 | fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode { | 526 | fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode { |
527 | let _p = profile::span("rewrite_children"); | ||
528 | |||
520 | // FIXME: this could be made much faster. | 529 | // FIXME: this could be made much faster. |
521 | let mut new_children = Vec::new(); | 530 | let mut new_children = Vec::new(); |
522 | if let Some(elements) = self.insertions(&InsertPos::FirstChildOf(node.clone())) { | 531 | if let Some(elements) = self.insertions(&InsertPos::FirstChildOf(node.clone())) { |
@@ -533,6 +542,8 @@ impl<'a> SyntaxRewriter<'a> { | |||
533 | acc: &mut Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | 542 | acc: &mut Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, |
534 | element: &SyntaxElement, | 543 | element: &SyntaxElement, |
535 | ) { | 544 | ) { |
545 | let _p = profile::span("rewrite_self"); | ||
546 | |||
536 | if let Some(replacement) = self.replacement(&element) { | 547 | if let Some(replacement) = self.replacement(&element) { |
537 | match replacement { | 548 | match replacement { |
538 | Replacement::Single(element) => acc.push(element_to_green(element)), | 549 | Replacement::Single(element) => acc.push(element_to_green(element)), |
@@ -588,6 +599,8 @@ fn with_children( | |||
588 | parent: &SyntaxNode, | 599 | parent: &SyntaxNode, |
589 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | 600 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, |
590 | ) -> SyntaxNode { | 601 | ) -> SyntaxNode { |
602 | let _p = profile::span("with_children"); | ||
603 | |||
591 | let len = new_children.iter().map(|it| it.text_len()).sum::<TextSize>(); | 604 | let len = new_children.iter().map(|it| it.text_len()).sum::<TextSize>(); |
592 | let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), new_children); | 605 | let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), new_children); |
593 | let new_root_node = parent.replace_with(new_node); | 606 | let new_root_node = parent.replace_with(new_node); |
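The `let _p = profile::span("...")` lines added above instrument the tree-diffing and rewriting hot paths. The `profile` crate itself is not part of this diff; the following is only a sketch, with invented names, of the RAII guard pattern such a call conventionally relies on:

use std::time::Instant;

struct SpanGuard {
    label: &'static str,
    start: Instant,
}

// Start timing when the guard is created...
fn span(label: &'static str) -> SpanGuard {
    SpanGuard { label, start: Instant::now() }
}

// ...and report when it is dropped at the end of the enclosing scope.
impl Drop for SpanGuard {
    fn drop(&mut self) {
        eprintln!("{} took {:?}", self.label, self.start.elapsed());
    }
}

fn rewrite_children() {
    let _p = span("rewrite_children"); // binding to `_p` keeps the guard alive for the whole function
    // ... actual rewriting work would happen here ...
}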
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index ac0326420..b985861f2 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs | |||
@@ -130,19 +130,28 @@ impl ast::String { | |||
130 | let text = self.text().as_str(); | 130 | let text = self.text().as_str(); |
131 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; | 131 | let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; |
132 | 132 | ||
133 | let mut buf = String::with_capacity(text.len()); | 133 | let mut buf = String::new(); |
134 | let mut text_iter = text.chars(); | ||
134 | let mut has_error = false; | 135 | let mut has_error = false; |
135 | unescape_literal(text, Mode::Str, &mut |_, unescaped_char| match unescaped_char { | 136 | unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match ( |
136 | Ok(c) => buf.push(c), | 137 | unescaped_char, |
137 | Err(_) => has_error = true, | 138 | buf.capacity() == 0, |
139 | ) { | ||
140 | (Ok(c), false) => buf.push(c), | ||
141 | (Ok(c), true) if Some(c) == text_iter.next() => (), | ||
142 | (Ok(c), true) => { | ||
143 | buf.reserve_exact(text.len()); | ||
144 | buf.push_str(&text[..char_range.start]); | ||
145 | buf.push(c); | ||
146 | } | ||
147 | (Err(_), _) => has_error = true, | ||
138 | }); | 148 | }); |
139 | 149 | ||
140 | if has_error { | 150 | match (has_error, buf.capacity() == 0) { |
141 | return None; | 151 | (true, _) => None, |
152 | (false, true) => Some(Cow::Borrowed(text)), | ||
153 | (false, false) => Some(Cow::Owned(buf)), | ||
142 | } | 154 | } |
143 | // FIXME: don't actually allocate for borrowed case | ||
144 | let res = if buf == text { Cow::Borrowed(text) } else { Cow::Owned(buf) }; | ||
145 | Some(res) | ||
146 | } | 155 | } |
147 | 156 | ||
148 | pub fn quote_offsets(&self) -> Option<QuoteOffsets> { | 157 | pub fn quote_offsets(&self) -> Option<QuoteOffsets> { |
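The rewritten `ast::String::value` above resolves the old FIXME: it no longer allocates when the literal contains no escapes, staying on the borrowed input until the first character that differs and only then copying the matching prefix into an owned buffer. A simplified sketch of the same copy-on-write trick, with ASCII lowercasing standing in for the real unescaping machinery:

use std::borrow::Cow;

fn lowercase_cow(text: &str) -> Cow<'_, str> {
    let mut buf = String::new();
    for (i, c) in text.char_indices() {
        let mapped = c.to_ascii_lowercase();
        if buf.is_empty() {
            if mapped == c {
                continue; // output still identical to the input: keep borrowing
            }
            buf.reserve_exact(text.len());
            buf.push_str(&text[..i]); // copy the prefix that matched so far
        }
        buf.push(mapped);
    }
    if buf.is_empty() {
        Cow::Borrowed(text)
    } else {
        Cow::Owned(buf)
    }
}

The real implementation additionally returns `None` whenever `unescape_literal` reports an error, as the hunk shows.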
diff --git a/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast index 50a6d8ee9..87eebf185 100644 --- a/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast +++ b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rast | |||
@@ -1,4 +1,4 @@ | |||
1 | SOURCE_FILE@0..304 | 1 | SOURCE_FILE@0..328 |
2 | FN@0..17 | 2 | FN@0..17 |
3 | ASYNC_KW@0..5 "async" | 3 | ASYNC_KW@0..5 "async" |
4 | WHITESPACE@5..6 " " | 4 | WHITESPACE@5..6 " " |
@@ -215,4 +215,16 @@ SOURCE_FILE@0..304 | |||
215 | ASSOC_ITEM_LIST@301..303 | 215 | ASSOC_ITEM_LIST@301..303 |
216 | L_CURLY@301..302 "{" | 216 | L_CURLY@301..302 "{" |
217 | R_CURLY@302..303 "}" | 217 | R_CURLY@302..303 "}" |
218 | WHITESPACE@303..304 "\n" | 218 | WHITESPACE@303..305 "\n\n" |
219 | EXTERN_BLOCK@305..327 | ||
220 | UNSAFE_KW@305..311 "unsafe" | ||
221 | WHITESPACE@311..312 " " | ||
222 | ABI@312..324 | ||
223 | EXTERN_KW@312..318 "extern" | ||
224 | WHITESPACE@318..319 " " | ||
225 | STRING@319..324 "\"C++\"" | ||
226 | WHITESPACE@324..325 " " | ||
227 | EXTERN_ITEM_LIST@325..327 | ||
228 | L_CURLY@325..326 "{" | ||
229 | R_CURLY@326..327 "}" | ||
230 | WHITESPACE@327..328 "\n" | ||
diff --git a/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs index 8d697c04b..6d27a082c 100644 --- a/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs +++ b/crates/syntax/test_data/parser/ok/0068_item_modifiers.rs | |||
@@ -14,3 +14,5 @@ unsafe auto trait T {} | |||
14 | unsafe impl Foo {} | 14 | unsafe impl Foo {} |
15 | default impl Foo {} | 15 | default impl Foo {} |
16 | unsafe default impl Foo {} | 16 | unsafe default impl Foo {} |
17 | |||
18 | unsafe extern "C++" {} | ||
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 20c3f5eab..6c1bf8d09 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs | |||
@@ -1,10 +1,7 @@ | |||
1 | //! `tt` crate defines a `TokenTree` data structure: this is the interface (both | 1 | //! `tt` crate defines a `TokenTree` data structure: this is the interface (both |
2 | //! input and output) of macros. It closely mirrors `proc_macro` crate's | 2 | //! input and output) of macros. It closely mirrors `proc_macro` crate's |
3 | //! `TokenTree`. | 3 | //! `TokenTree`. |
4 | use std::{ | 4 | use std::{fmt, panic::RefUnwindSafe}; |
5 | fmt::{self, Debug}, | ||
6 | panic::RefUnwindSafe, | ||
7 | }; | ||
8 | 5 | ||
9 | use stdx::impl_from; | 6 | use stdx::impl_from; |
10 | 7 | ||
@@ -139,7 +136,7 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) | |||
139 | Ok(()) | 136 | Ok(()) |
140 | } | 137 | } |
141 | 138 | ||
142 | impl Debug for Subtree { | 139 | impl fmt::Debug for Subtree { |
143 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | 140 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
144 | print_debug_subtree(f, self, 0) | 141 | print_debug_subtree(f, self, 0) |
145 | } | 142 | } |
@@ -240,7 +237,18 @@ pub enum ExpansionError { | |||
240 | ExpansionError(String), | 237 | ExpansionError(String), |
241 | } | 238 | } |
242 | 239 | ||
243 | pub trait TokenExpander: Debug + Send + Sync + RefUnwindSafe { | 240 | impl fmt::Display for ExpansionError { |
241 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
242 | match self { | ||
243 | ExpansionError::IOError(e) => write!(f, "I/O error: {}", e), | ||
244 | ExpansionError::JsonError(e) => write!(f, "JSON decoding error: {}", e), | ||
245 | ExpansionError::Unknown(e) => e.fmt(f), | ||
246 | ExpansionError::ExpansionError(e) => write!(f, "proc macro returned error: {}", e), | ||
247 | } | ||
248 | } | ||
249 | } | ||
250 | |||
251 | pub trait TokenExpander: fmt::Debug + Send + Sync + RefUnwindSafe { | ||
244 | fn expand(&self, subtree: &Subtree, attrs: Option<&Subtree>) | 252 | fn expand(&self, subtree: &Subtree, attrs: Option<&Subtree>) |
245 | -> Result<Subtree, ExpansionError>; | 253 | -> Result<Subtree, ExpansionError>; |
246 | } | 254 | } |