diff options
Diffstat (limited to 'crates')
46 files changed, 1376 insertions, 635 deletions
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 01a4d205f..508ac37c2 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs | |||
@@ -170,7 +170,7 @@ impl HirDisplay for Field { | |||
170 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | 170 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { |
171 | write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?; | 171 | write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?; |
172 | write!(f, "{}: ", self.name(f.db))?; | 172 | write!(f, "{}: ", self.name(f.db))?; |
173 | self.signature_ty(f.db).hir_fmt(f) | 173 | self.ty(f.db).hir_fmt(f) |
174 | } | 174 | } |
175 | } | 175 | } |
176 | 176 | ||
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 0acfa582a..6fcc58f61 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs | |||
@@ -509,7 +509,7 @@ impl Field { | |||
509 | /// placeholder types for type parameters). This is good for showing | 509 | /// placeholder types for type parameters). This is good for showing |
510 | /// signature help, but not so good to actually get the type of the field | 510 | /// signature help, but not so good to actually get the type of the field |
511 | /// when you actually have a variable of the struct. | 511 | /// when you actually have a variable of the struct. |
512 | pub fn signature_ty(&self, db: &dyn HirDatabase) -> Type { | 512 | pub fn ty(&self, db: &dyn HirDatabase) -> Type { |
513 | let var_id = self.parent.into(); | 513 | let var_id = self.parent.into(); |
514 | let generic_def_id: GenericDefId = match self.parent { | 514 | let generic_def_id: GenericDefId = match self.parent { |
515 | VariantDef::Struct(it) => it.id.into(), | 515 | VariantDef::Struct(it) => it.id.into(), |
@@ -1744,6 +1744,10 @@ impl Type { | |||
1744 | } | 1744 | } |
1745 | } | 1745 | } |
1746 | 1746 | ||
1747 | pub fn strip_references(&self) -> Type { | ||
1748 | self.derived(self.ty.strip_references().clone()) | ||
1749 | } | ||
1750 | |||
1747 | pub fn is_unknown(&self) -> bool { | 1751 | pub fn is_unknown(&self) -> bool { |
1748 | self.ty.is_unknown() | 1752 | self.ty.is_unknown() |
1749 | } | 1753 | } |
@@ -1984,7 +1988,7 @@ impl Type { | |||
1984 | None | 1988 | None |
1985 | } | 1989 | } |
1986 | 1990 | ||
1987 | pub fn type_parameters(&self) -> impl Iterator<Item = Type> + '_ { | 1991 | pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ { |
1988 | self.ty | 1992 | self.ty |
1989 | .strip_references() | 1993 | .strip_references() |
1990 | .as_adt() | 1994 | .as_adt() |
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs index d9294d93a..0171d8a92 100644 --- a/crates/hir_def/src/attr.rs +++ b/crates/hir_def/src/attr.rs | |||
@@ -484,10 +484,10 @@ impl AttrsWithOwner { | |||
484 | let mut buf = String::new(); | 484 | let mut buf = String::new(); |
485 | let mut mapping = Vec::new(); | 485 | let mut mapping = Vec::new(); |
486 | for (doc, idx) in docs { | 486 | for (doc, idx) in docs { |
487 | // str::lines doesn't yield anything for the empty string | ||
488 | if !doc.is_empty() { | 487 | if !doc.is_empty() { |
489 | for line in doc.split('\n') { | 488 | let mut base_offset = 0; |
490 | let line = line.trim_end(); | 489 | for raw_line in doc.split('\n') { |
490 | let line = raw_line.trim_end(); | ||
491 | let line_len = line.len(); | 491 | let line_len = line.len(); |
492 | let (offset, line) = match line.char_indices().nth(indent) { | 492 | let (offset, line) = match line.char_indices().nth(indent) { |
493 | Some((offset, _)) => (offset, &line[offset..]), | 493 | Some((offset, _)) => (offset, &line[offset..]), |
@@ -498,9 +498,13 @@ impl AttrsWithOwner { | |||
498 | mapping.push(( | 498 | mapping.push(( |
499 | TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?), | 499 | TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?), |
500 | idx, | 500 | idx, |
501 | TextRange::new(offset.try_into().ok()?, line_len.try_into().ok()?), | 501 | TextRange::at( |
502 | (base_offset + offset).try_into().ok()?, | ||
503 | line_len.try_into().ok()?, | ||
504 | ), | ||
502 | )); | 505 | )); |
503 | buf.push('\n'); | 506 | buf.push('\n'); |
507 | base_offset += raw_line.len() + 1; | ||
504 | } | 508 | } |
505 | } else { | 509 | } else { |
506 | buf.push('\n'); | 510 | buf.push('\n'); |
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs index dc3f2908f..c06a37294 100644 --- a/crates/hir_def/src/find_path.rs +++ b/crates/hir_def/src/find_path.rs | |||
@@ -130,7 +130,8 @@ fn find_path_inner( | |||
130 | } | 130 | } |
131 | 131 | ||
132 | // - if the item is the crate root of a dependency crate, return the name from the extern prelude | 132 | // - if the item is the crate root of a dependency crate, return the name from the extern prelude |
133 | for (name, def_id) in root.def_map(db).extern_prelude() { | 133 | let root_def_map = root.def_map(db); |
134 | for (name, def_id) in root_def_map.extern_prelude() { | ||
134 | if item == ItemInNs::Types(*def_id) { | 135 | if item == ItemInNs::Types(*def_id) { |
135 | let name = scope_name.unwrap_or_else(|| name.clone()); | 136 | let name = scope_name.unwrap_or_else(|| name.clone()); |
136 | return Some(ModPath::from_segments(PathKind::Plain, vec![name])); | 137 | return Some(ModPath::from_segments(PathKind::Plain, vec![name])); |
@@ -138,7 +139,8 @@ fn find_path_inner( | |||
138 | } | 139 | } |
139 | 140 | ||
140 | // - if the item is in the prelude, return the name from there | 141 | // - if the item is in the prelude, return the name from there |
141 | if let Some(prelude_module) = def_map.prelude() { | 142 | if let Some(prelude_module) = root_def_map.prelude() { |
143 | // Preludes in block DefMaps are ignored, only the crate DefMap is searched | ||
142 | let prelude_def_map = prelude_module.def_map(db); | 144 | let prelude_def_map = prelude_module.def_map(db); |
143 | let prelude_scope: &crate::item_scope::ItemScope = | 145 | let prelude_scope: &crate::item_scope::ItemScope = |
144 | &prelude_def_map[prelude_module.local_id].scope; | 146 | &prelude_def_map[prelude_module.local_id].scope; |
@@ -1057,4 +1059,28 @@ fn f() { | |||
1057 | "dep", | 1059 | "dep", |
1058 | ); | 1060 | ); |
1059 | } | 1061 | } |
1062 | |||
1063 | #[test] | ||
1064 | fn prelude_with_inner_items() { | ||
1065 | check_found_path( | ||
1066 | r#" | ||
1067 | //- /main.rs crate:main deps:std | ||
1068 | fn f() { | ||
1069 | fn inner() {} | ||
1070 | $0 | ||
1071 | } | ||
1072 | //- /std.rs crate:std | ||
1073 | pub mod prelude { | ||
1074 | pub enum Option { None } | ||
1075 | pub use Option::*; | ||
1076 | } | ||
1077 | #[prelude_import] | ||
1078 | pub use prelude::*; | ||
1079 | "#, | ||
1080 | "None", | ||
1081 | "None", | ||
1082 | "None", | ||
1083 | "None", | ||
1084 | ); | ||
1085 | } | ||
1060 | } | 1086 | } |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 1e4b0cc19..3e9abd8a1 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -3,14 +3,14 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use base_db::{salsa, SourceDatabase}; | 5 | use base_db::{salsa, SourceDatabase}; |
6 | use mbe::{ExpandError, ExpandResult, MacroDef, MacroRules}; | 6 | use mbe::{ExpandError, ExpandResult}; |
7 | use parser::FragmentKind; | 7 | use parser::FragmentKind; |
8 | use syntax::{ | 8 | use syntax::{ |
9 | algo::diff, | 9 | algo::diff, |
10 | ast::{MacroStmts, NameOwner}, | 10 | ast::{self, NameOwner}, |
11 | AstNode, GreenNode, Parse, | 11 | AstNode, GreenNode, Parse, |
12 | SyntaxKind::*, | 12 | SyntaxKind::*, |
13 | SyntaxNode, | 13 | SyntaxNode, SyntaxToken, |
14 | }; | 14 | }; |
15 | 15 | ||
16 | use crate::{ | 16 | use crate::{ |
@@ -27,23 +27,28 @@ const TOKEN_LIMIT: usize = 524288; | |||
27 | 27 | ||
28 | #[derive(Debug, Clone, Eq, PartialEq)] | 28 | #[derive(Debug, Clone, Eq, PartialEq)] |
29 | pub enum TokenExpander { | 29 | pub enum TokenExpander { |
30 | MacroRules(mbe::MacroRules), | 30 | /// Old-style `macro_rules`. |
31 | MacroDef(mbe::MacroDef), | 31 | MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap }, |
32 | /// AKA macros 2.0. | ||
33 | MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap }, | ||
34 | /// Stuff like `line!` and `file!`. | ||
32 | Builtin(BuiltinFnLikeExpander), | 35 | Builtin(BuiltinFnLikeExpander), |
36 | /// `derive(Copy)` and such. | ||
33 | BuiltinDerive(BuiltinDeriveExpander), | 37 | BuiltinDerive(BuiltinDeriveExpander), |
38 | /// The thing we love the most here in rust-analyzer -- procedural macros. | ||
34 | ProcMacro(ProcMacroExpander), | 39 | ProcMacro(ProcMacroExpander), |
35 | } | 40 | } |
36 | 41 | ||
37 | impl TokenExpander { | 42 | impl TokenExpander { |
38 | pub fn expand( | 43 | fn expand( |
39 | &self, | 44 | &self, |
40 | db: &dyn AstDatabase, | 45 | db: &dyn AstDatabase, |
41 | id: LazyMacroId, | 46 | id: LazyMacroId, |
42 | tt: &tt::Subtree, | 47 | tt: &tt::Subtree, |
43 | ) -> mbe::ExpandResult<tt::Subtree> { | 48 | ) -> mbe::ExpandResult<tt::Subtree> { |
44 | match self { | 49 | match self { |
45 | TokenExpander::MacroRules(it) => it.expand(tt), | 50 | TokenExpander::MacroRules { mac, .. } => mac.expand(tt), |
46 | TokenExpander::MacroDef(it) => it.expand(tt), | 51 | TokenExpander::MacroDef { mac, .. } => mac.expand(tt), |
47 | TokenExpander::Builtin(it) => it.expand(db, id, tt), | 52 | TokenExpander::Builtin(it) => it.expand(db, id, tt), |
48 | // FIXME switch these to ExpandResult as well | 53 | // FIXME switch these to ExpandResult as well |
49 | TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), | 54 | TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), |
@@ -56,23 +61,23 @@ impl TokenExpander { | |||
56 | } | 61 | } |
57 | } | 62 | } |
58 | 63 | ||
59 | pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { | 64 | pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { |
60 | match self { | 65 | match self { |
61 | TokenExpander::MacroRules(it) => it.map_id_down(id), | 66 | TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id), |
62 | TokenExpander::MacroDef(it) => it.map_id_down(id), | 67 | TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id), |
63 | TokenExpander::Builtin(..) => id, | 68 | TokenExpander::Builtin(..) |
64 | TokenExpander::BuiltinDerive(..) => id, | 69 | | TokenExpander::BuiltinDerive(..) |
65 | TokenExpander::ProcMacro(..) => id, | 70 | | TokenExpander::ProcMacro(..) => id, |
66 | } | 71 | } |
67 | } | 72 | } |
68 | 73 | ||
69 | pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { | 74 | pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { |
70 | match self { | 75 | match self { |
71 | TokenExpander::MacroRules(it) => it.map_id_up(id), | 76 | TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id), |
72 | TokenExpander::MacroDef(it) => it.map_id_up(id), | 77 | TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id), |
73 | TokenExpander::Builtin(..) => (id, mbe::Origin::Call), | 78 | TokenExpander::Builtin(..) |
74 | TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call), | 79 | | TokenExpander::BuiltinDerive(..) |
75 | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), | 80 | | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), |
76 | } | 81 | } |
77 | } | 82 | } |
78 | } | 83 | } |
@@ -82,28 +87,48 @@ impl TokenExpander { | |||
82 | pub trait AstDatabase: SourceDatabase { | 87 | pub trait AstDatabase: SourceDatabase { |
83 | fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; | 88 | fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; |
84 | 89 | ||
90 | /// Main public API -- parses a hir file, not caring whether it's a real | ||
91 | /// file or a macro expansion. | ||
85 | #[salsa::transparent] | 92 | #[salsa::transparent] |
86 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; | 93 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; |
87 | 94 | /// Implementation for the macro case. | |
88 | #[salsa::interned] | ||
89 | fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId; | ||
90 | fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>; | ||
91 | #[salsa::transparent] | ||
92 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; | ||
93 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; | ||
94 | fn parse_macro_expansion( | 95 | fn parse_macro_expansion( |
95 | &self, | 96 | &self, |
96 | macro_file: MacroFile, | 97 | macro_file: MacroFile, |
97 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; | 98 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; |
98 | fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>; | ||
99 | |||
100 | /// Firewall query that returns the error from the `macro_expand` query. | ||
101 | fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>; | ||
102 | 99 | ||
100 | /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the | ||
101 | /// reason why we use salsa at all. | ||
102 | /// | ||
103 | /// We encode macro definitions into ids of macro calls, this is what allows us | ||
104 | /// to be incremental. | ||
105 | #[salsa::interned] | ||
106 | fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId; | ||
107 | /// Certain built-in macros are eager (`format!(concat!("file: ", file!(), "{}"), 92)`). | ||
108 | /// For them, we actually want to encode the whole token tree as an argument. | ||
103 | #[salsa::interned] | 109 | #[salsa::interned] |
104 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; | 110 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; |
105 | 111 | ||
112 | /// Lowers syntactic macro call to a token tree representation. | ||
113 | #[salsa::transparent] | ||
114 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; | ||
115 | /// Extracts syntax node, corresponding to a macro call. That's a firewall | ||
116 | /// query, only typing in the macro call itself changes the returned | ||
117 | /// subtree. | ||
118 | fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>; | ||
119 | /// Gets the expander for this macro. This compiles declarative macros, and | ||
120 | /// just fetches procedural ones. | ||
121 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<TokenExpander>>; | ||
122 | |||
123 | /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory) | ||
124 | fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>; | ||
125 | /// Special case of the previous query for procedural macros. We can't LRU | ||
126 | /// proc macros, since they are not deterministic in general, and | ||
127 | /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng | ||
128 | /// heroically debugged this once! | ||
106 | fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; | 129 | fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; |
130 | /// Firewall query that returns the error from the `macro_expand` query. | ||
131 | fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>; | ||
107 | 132 | ||
108 | fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>; | 133 | fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>; |
109 | } | 134 | } |
@@ -115,36 +140,159 @@ pub trait AstDatabase: SourceDatabase { | |||
115 | pub fn expand_hypothetical( | 140 | pub fn expand_hypothetical( |
116 | db: &dyn AstDatabase, | 141 | db: &dyn AstDatabase, |
117 | actual_macro_call: MacroCallId, | 142 | actual_macro_call: MacroCallId, |
118 | hypothetical_args: &syntax::ast::TokenTree, | 143 | hypothetical_args: &ast::TokenTree, |
119 | token_to_map: syntax::SyntaxToken, | 144 | token_to_map: SyntaxToken, |
120 | ) -> Option<(SyntaxNode, syntax::SyntaxToken)> { | 145 | ) -> Option<(SyntaxNode, SyntaxToken)> { |
121 | let macro_file = MacroFile { macro_call_id: actual_macro_call }; | ||
122 | let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()); | 146 | let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()); |
123 | let range = | 147 | let range = |
124 | token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; | 148 | token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; |
125 | let token_id = tmap_1.token_by_range(range)?; | 149 | let token_id = tmap_1.token_by_range(range)?; |
126 | let macro_def = expander(db, actual_macro_call)?; | 150 | |
151 | let lazy_id = match actual_macro_call { | ||
152 | MacroCallId::LazyMacro(id) => id, | ||
153 | MacroCallId::EagerMacro(_) => return None, | ||
154 | }; | ||
155 | |||
156 | let macro_def = { | ||
157 | let loc = db.lookup_intern_macro(lazy_id); | ||
158 | db.macro_def(loc.def)? | ||
159 | }; | ||
160 | |||
161 | let hypothetical_expansion = macro_def.expand(db, lazy_id, &tt); | ||
162 | |||
163 | let fragment_kind = to_fragment_kind(db, actual_macro_call); | ||
164 | |||
127 | let (node, tmap_2) = | 165 | let (node, tmap_2) = |
128 | parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1)))).value?; | 166 | mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?; |
129 | let token_id = macro_def.0.map_id_down(token_id); | 167 | |
168 | let token_id = macro_def.map_id_down(token_id); | ||
130 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; | 169 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; |
131 | let token = node.syntax_node().covering_element(range).into_token()?; | 170 | let token = node.syntax_node().covering_element(range).into_token()?; |
132 | Some((node.syntax_node(), token)) | 171 | Some((node.syntax_node(), token)) |
133 | } | 172 | } |
134 | 173 | ||
135 | fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 174 | fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
136 | let map = | 175 | let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default(); |
137 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); | ||
138 | Arc::new(map) | 176 | Arc::new(map) |
139 | } | 177 | } |
140 | 178 | ||
141 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | 179 | fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { |
180 | match file_id.0 { | ||
181 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | ||
182 | HirFileIdRepr::MacroFile(macro_file) => { | ||
183 | db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node()) | ||
184 | } | ||
185 | } | ||
186 | } | ||
187 | |||
188 | fn parse_macro_expansion( | ||
189 | db: &dyn AstDatabase, | ||
190 | macro_file: MacroFile, | ||
191 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { | ||
192 | let _p = profile::span("parse_macro_expansion"); | ||
193 | let result = db.macro_expand(macro_file.macro_call_id); | ||
194 | |||
195 | if let Some(err) = &result.err { | ||
196 | // Note: | ||
197 | // The final goal we would like to make all parse_macro success, | ||
198 | // such that the following log will not call anyway. | ||
199 | match macro_file.macro_call_id { | ||
200 | MacroCallId::LazyMacro(id) => { | ||
201 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
202 | let node = loc.kind.node(db); | ||
203 | |||
204 | // collect parent information for warning log | ||
205 | let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { | ||
206 | it.file_id.call_node(db) | ||
207 | }) | ||
208 | .map(|n| format!("{:#}", n.value)) | ||
209 | .collect::<Vec<_>>() | ||
210 | .join("\n"); | ||
211 | |||
212 | log::warn!( | ||
213 | "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}", | ||
214 | err, | ||
215 | node.value, | ||
216 | parents | ||
217 | ); | ||
218 | } | ||
219 | _ => { | ||
220 | log::warn!("fail on macro_parse: (reason: {:?})", err); | ||
221 | } | ||
222 | } | ||
223 | } | ||
224 | let tt = match result.value { | ||
225 | Some(tt) => tt, | ||
226 | None => return ExpandResult { value: None, err: result.err }, | ||
227 | }; | ||
228 | |||
229 | let fragment_kind = to_fragment_kind(db, macro_file.macro_call_id); | ||
230 | |||
231 | log::debug!("expanded = {}", tt.as_debug_string()); | ||
232 | log::debug!("kind = {:?}", fragment_kind); | ||
233 | |||
234 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { | ||
235 | Ok(it) => it, | ||
236 | Err(err) => { | ||
237 | log::debug!( | ||
238 | "failed to parse expanstion to {:?} = {}", | ||
239 | fragment_kind, | ||
240 | tt.as_debug_string() | ||
241 | ); | ||
242 | return ExpandResult::only_err(err); | ||
243 | } | ||
244 | }; | ||
245 | |||
246 | match result.err { | ||
247 | Some(err) => { | ||
248 | // Safety check for recursive identity macro. | ||
249 | let node = parse.syntax_node(); | ||
250 | let file: HirFileId = macro_file.into(); | ||
251 | let call_node = match file.call_node(db) { | ||
252 | Some(it) => it, | ||
253 | None => { | ||
254 | return ExpandResult::only_err(err); | ||
255 | } | ||
256 | }; | ||
257 | if is_self_replicating(&node, &call_node.value) { | ||
258 | return ExpandResult::only_err(err); | ||
259 | } else { | ||
260 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) } | ||
261 | } | ||
262 | } | ||
263 | None => { | ||
264 | log::debug!("parse = {:?}", parse.syntax_node().kind()); | ||
265 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None } | ||
266 | } | ||
267 | } | ||
268 | } | ||
269 | |||
270 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
271 | let arg = db.macro_arg_text(id)?; | ||
272 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg)); | ||
273 | Some(Arc::new((tt, tmap))) | ||
274 | } | ||
275 | |||
276 | fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | ||
277 | let id = match id { | ||
278 | MacroCallId::LazyMacro(id) => id, | ||
279 | MacroCallId::EagerMacro(_id) => { | ||
280 | // FIXME: support macro_arg for eager macro | ||
281 | return None; | ||
282 | } | ||
283 | }; | ||
284 | let loc = db.lookup_intern_macro(id); | ||
285 | let arg = loc.kind.arg(db)?; | ||
286 | Some(arg.green()) | ||
287 | } | ||
288 | |||
289 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>> { | ||
142 | match id.kind { | 290 | match id.kind { |
143 | MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) { | 291 | MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) { |
144 | syntax::ast::Macro::MacroRules(macro_rules) => { | 292 | ast::Macro::MacroRules(macro_rules) => { |
145 | let arg = macro_rules.token_tree()?; | 293 | let arg = macro_rules.token_tree()?; |
146 | let (tt, tmap) = mbe::ast_to_token_tree(&arg); | 294 | let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg); |
147 | let rules = match MacroRules::parse(&tt) { | 295 | let mac = match mbe::MacroRules::parse(&tt) { |
148 | Ok(it) => it, | 296 | Ok(it) => it, |
149 | Err(err) => { | 297 | Err(err) => { |
150 | let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default(); | 298 | let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default(); |
@@ -152,12 +300,12 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, | |||
152 | return None; | 300 | return None; |
153 | } | 301 | } |
154 | }; | 302 | }; |
155 | Some(Arc::new((TokenExpander::MacroRules(rules), tmap))) | 303 | Some(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map })) |
156 | } | 304 | } |
157 | syntax::ast::Macro::MacroDef(macro_def) => { | 305 | ast::Macro::MacroDef(macro_def) => { |
158 | let arg = macro_def.body()?; | 306 | let arg = macro_def.body()?; |
159 | let (tt, tmap) = mbe::ast_to_token_tree(&arg); | 307 | let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg); |
160 | let rules = match MacroDef::parse(&tt) { | 308 | let mac = match mbe::MacroDef::parse(&tt) { |
161 | Ok(it) => it, | 309 | Ok(it) => it, |
162 | Err(err) => { | 310 | Err(err) => { |
163 | let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default(); | 311 | let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default(); |
@@ -165,41 +313,18 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, | |||
165 | return None; | 313 | return None; |
166 | } | 314 | } |
167 | }; | 315 | }; |
168 | Some(Arc::new((TokenExpander::MacroDef(rules), tmap))) | 316 | Some(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map })) |
169 | } | 317 | } |
170 | }, | 318 | }, |
171 | MacroDefKind::BuiltIn(expander, _) => { | 319 | MacroDefKind::BuiltIn(expander, _) => Some(Arc::new(TokenExpander::Builtin(expander))), |
172 | Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default()))) | ||
173 | } | ||
174 | MacroDefKind::BuiltInDerive(expander, _) => { | 320 | MacroDefKind::BuiltInDerive(expander, _) => { |
175 | Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default()))) | 321 | Some(Arc::new(TokenExpander::BuiltinDerive(expander))) |
176 | } | 322 | } |
177 | MacroDefKind::BuiltInEager(..) => None, | 323 | MacroDefKind::BuiltInEager(..) => None, |
178 | MacroDefKind::ProcMacro(expander, ..) => { | 324 | MacroDefKind::ProcMacro(expander, ..) => Some(Arc::new(TokenExpander::ProcMacro(expander))), |
179 | Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default()))) | ||
180 | } | ||
181 | } | 325 | } |
182 | } | 326 | } |
183 | 327 | ||
184 | fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | ||
185 | let id = match id { | ||
186 | MacroCallId::LazyMacro(id) => id, | ||
187 | MacroCallId::EagerMacro(_id) => { | ||
188 | // FIXME: support macro_arg for eager macro | ||
189 | return None; | ||
190 | } | ||
191 | }; | ||
192 | let loc = db.lookup_intern_macro(id); | ||
193 | let arg = loc.kind.arg(db)?; | ||
194 | Some(arg.green()) | ||
195 | } | ||
196 | |||
197 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
198 | let arg = db.macro_arg_text(id)?; | ||
199 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg)); | ||
200 | Some(Arc::new((tt, tmap))) | ||
201 | } | ||
202 | |||
203 | fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { | 328 | fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { |
204 | macro_expand_with_arg(db, id, None) | 329 | macro_expand_with_arg(db, id, None) |
205 | } | 330 | } |
@@ -208,19 +333,6 @@ fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<E | |||
208 | db.macro_expand(macro_call).err | 333 | db.macro_expand(macro_call).err |
209 | } | 334 | } |
210 | 335 | ||
211 | fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | ||
212 | let lazy_id = match id { | ||
213 | MacroCallId::LazyMacro(id) => id, | ||
214 | MacroCallId::EagerMacro(_id) => { | ||
215 | return None; | ||
216 | } | ||
217 | }; | ||
218 | |||
219 | let loc = db.lookup_intern_macro(lazy_id); | ||
220 | let macro_rules = db.macro_def(loc.def)?; | ||
221 | Some(macro_rules) | ||
222 | } | ||
223 | |||
224 | fn macro_expand_with_arg( | 336 | fn macro_expand_with_arg( |
225 | db: &dyn AstDatabase, | 337 | db: &dyn AstDatabase, |
226 | id: MacroCallId, | 338 | id: MacroCallId, |
@@ -254,7 +366,7 @@ fn macro_expand_with_arg( | |||
254 | Some(it) => it, | 366 | Some(it) => it, |
255 | None => return ExpandResult::str_err("Fail to find macro definition".into()), | 367 | None => return ExpandResult::str_err("Fail to find macro definition".into()), |
256 | }; | 368 | }; |
257 | let ExpandResult { value: tt, err } = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); | 369 | let ExpandResult { value: tt, err } = macro_rules.expand(db, lazy_id, ¯o_arg.0); |
258 | // Set a hard limit for the expanded tt | 370 | // Set a hard limit for the expanded tt |
259 | let count = tt.count(); | 371 | let count = tt.count(); |
260 | if count > TOKEN_LIMIT { | 372 | if count > TOKEN_LIMIT { |
@@ -294,116 +406,11 @@ fn expand_proc_macro( | |||
294 | expander.expand(db, loc.krate, ¯o_arg.0) | 406 | expander.expand(db, loc.krate, ¯o_arg.0) |
295 | } | 407 | } |
296 | 408 | ||
297 | fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { | ||
298 | match file_id.0 { | ||
299 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | ||
300 | HirFileIdRepr::MacroFile(macro_file) => { | ||
301 | db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node()) | ||
302 | } | ||
303 | } | ||
304 | } | ||
305 | |||
306 | fn parse_macro_expansion( | ||
307 | db: &dyn AstDatabase, | ||
308 | macro_file: MacroFile, | ||
309 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { | ||
310 | parse_macro_with_arg(db, macro_file, None) | ||
311 | } | ||
312 | |||
313 | fn parse_macro_with_arg( | ||
314 | db: &dyn AstDatabase, | ||
315 | macro_file: MacroFile, | ||
316 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | ||
317 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { | ||
318 | let macro_call_id = macro_file.macro_call_id; | ||
319 | let result = if let Some(arg) = arg { | ||
320 | macro_expand_with_arg(db, macro_call_id, Some(arg)) | ||
321 | } else { | ||
322 | db.macro_expand(macro_call_id) | ||
323 | }; | ||
324 | |||
325 | let _p = profile::span("parse_macro_expansion"); | ||
326 | |||
327 | if let Some(err) = &result.err { | ||
328 | // Note: | ||
329 | // The final goal we would like to make all parse_macro success, | ||
330 | // such that the following log will not call anyway. | ||
331 | match macro_call_id { | ||
332 | MacroCallId::LazyMacro(id) => { | ||
333 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
334 | let node = loc.kind.node(db); | ||
335 | |||
336 | // collect parent information for warning log | ||
337 | let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { | ||
338 | it.file_id.call_node(db) | ||
339 | }) | ||
340 | .map(|n| format!("{:#}", n.value)) | ||
341 | .collect::<Vec<_>>() | ||
342 | .join("\n"); | ||
343 | |||
344 | log::warn!( | ||
345 | "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}", | ||
346 | err, | ||
347 | node.value, | ||
348 | parents | ||
349 | ); | ||
350 | } | ||
351 | _ => { | ||
352 | log::warn!("fail on macro_parse: (reason: {:?})", err); | ||
353 | } | ||
354 | } | ||
355 | } | ||
356 | let tt = match result.value { | ||
357 | Some(tt) => tt, | ||
358 | None => return ExpandResult { value: None, err: result.err }, | ||
359 | }; | ||
360 | |||
361 | let fragment_kind = to_fragment_kind(db, macro_call_id); | ||
362 | |||
363 | log::debug!("expanded = {}", tt.as_debug_string()); | ||
364 | log::debug!("kind = {:?}", fragment_kind); | ||
365 | |||
366 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { | ||
367 | Ok(it) => it, | ||
368 | Err(err) => { | ||
369 | log::debug!( | ||
370 | "failed to parse expanstion to {:?} = {}", | ||
371 | fragment_kind, | ||
372 | tt.as_debug_string() | ||
373 | ); | ||
374 | return ExpandResult::only_err(err); | ||
375 | } | ||
376 | }; | ||
377 | |||
378 | match result.err { | ||
379 | Some(err) => { | ||
380 | // Safety check for recursive identity macro. | ||
381 | let node = parse.syntax_node(); | ||
382 | let file: HirFileId = macro_file.into(); | ||
383 | let call_node = match file.call_node(db) { | ||
384 | Some(it) => it, | ||
385 | None => { | ||
386 | return ExpandResult::only_err(err); | ||
387 | } | ||
388 | }; | ||
389 | if is_self_replicating(&node, &call_node.value) { | ||
390 | return ExpandResult::only_err(err); | ||
391 | } else { | ||
392 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) } | ||
393 | } | ||
394 | } | ||
395 | None => { | ||
396 | log::debug!("parse = {:?}", parse.syntax_node().kind()); | ||
397 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None } | ||
398 | } | ||
399 | } | ||
400 | } | ||
401 | |||
402 | fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool { | 409 | fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool { |
403 | if diff(from, to).is_empty() { | 410 | if diff(from, to).is_empty() { |
404 | return true; | 411 | return true; |
405 | } | 412 | } |
406 | if let Some(stmts) = MacroStmts::cast(from.clone()) { | 413 | if let Some(stmts) = ast::MacroStmts::cast(from.clone()) { |
407 | if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) { | 414 | if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) { |
408 | return true; | 415 | return true; |
409 | } | 416 | } |
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index 779725629..ed61ebca3 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs | |||
@@ -5,6 +5,7 @@ | |||
5 | use std::sync::Arc; | 5 | use std::sync::Arc; |
6 | 6 | ||
7 | use base_db::CrateId; | 7 | use base_db::CrateId; |
8 | use db::TokenExpander; | ||
8 | use either::Either; | 9 | use either::Either; |
9 | use mbe::Origin; | 10 | use mbe::Origin; |
10 | use parser::SyntaxKind; | 11 | use parser::SyntaxKind; |
@@ -115,7 +116,7 @@ struct HygieneInfo { | |||
115 | /// The `macro_rules!` arguments. | 116 | /// The `macro_rules!` arguments. |
116 | def_start: Option<InFile<TextSize>>, | 117 | def_start: Option<InFile<TextSize>>, |
117 | 118 | ||
118 | macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, | 119 | macro_def: Arc<TokenExpander>, |
119 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, | 120 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, |
120 | exp_map: Arc<mbe::TokenMap>, | 121 | exp_map: Arc<mbe::TokenMap>, |
121 | } | 122 | } |
@@ -124,13 +125,16 @@ impl HygieneInfo { | |||
124 | fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)> { | 125 | fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)> { |
125 | let token_id = self.exp_map.token_by_range(token)?; | 126 | let token_id = self.exp_map.token_by_range(token)?; |
126 | 127 | ||
127 | let (token_id, origin) = self.macro_def.0.map_id_up(token_id); | 128 | let (token_id, origin) = self.macro_def.map_id_up(token_id); |
128 | let (token_map, tt) = match origin { | 129 | let (token_map, tt) = match origin { |
129 | mbe::Origin::Call => (&self.macro_arg.1, self.arg_start), | 130 | mbe::Origin::Call => (&self.macro_arg.1, self.arg_start), |
130 | mbe::Origin::Def => ( | 131 | mbe::Origin::Def => match (&*self.macro_def, self.def_start) { |
131 | &self.macro_def.1, | 132 | (TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt)) |
132 | *self.def_start.as_ref().expect("`Origin::Def` used with non-`macro_rules!` macro"), | 133 | | (TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt)) => { |
133 | ), | 134 | (def_site_token_map, tt) |
135 | } | ||
136 | _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), | ||
137 | }, | ||
134 | }; | 138 | }; |
135 | 139 | ||
136 | let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; | 140 | let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; |
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index a0e6aec62..0402640de 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -351,7 +351,7 @@ pub struct ExpansionInfo { | |||
351 | /// The `macro_rules!` arguments. | 351 | /// The `macro_rules!` arguments. |
352 | def: Option<InFile<ast::TokenTree>>, | 352 | def: Option<InFile<ast::TokenTree>>, |
353 | 353 | ||
354 | macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, | 354 | macro_def: Arc<db::TokenExpander>, |
355 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, | 355 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, |
356 | exp_map: Arc<mbe::TokenMap>, | 356 | exp_map: Arc<mbe::TokenMap>, |
357 | } | 357 | } |
@@ -368,7 +368,7 @@ impl ExpansionInfo { | |||
368 | assert_eq!(token.file_id, self.arg.file_id); | 368 | assert_eq!(token.file_id, self.arg.file_id); |
369 | let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; | 369 | let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; |
370 | let token_id = self.macro_arg.1.token_by_range(range)?; | 370 | let token_id = self.macro_arg.1.token_by_range(range)?; |
371 | let token_id = self.macro_def.0.map_id_down(token_id); | 371 | let token_id = self.macro_def.map_id_down(token_id); |
372 | 372 | ||
373 | let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 373 | let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; |
374 | 374 | ||
@@ -383,17 +383,16 @@ impl ExpansionInfo { | |||
383 | ) -> Option<(InFile<SyntaxToken>, Origin)> { | 383 | ) -> Option<(InFile<SyntaxToken>, Origin)> { |
384 | let token_id = self.exp_map.token_by_range(token.value.text_range())?; | 384 | let token_id = self.exp_map.token_by_range(token.value.text_range())?; |
385 | 385 | ||
386 | let (token_id, origin) = self.macro_def.0.map_id_up(token_id); | 386 | let (token_id, origin) = self.macro_def.map_id_up(token_id); |
387 | let (token_map, tt) = match origin { | 387 | let (token_map, tt) = match origin { |
388 | mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), | 388 | mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), |
389 | mbe::Origin::Def => ( | 389 | mbe::Origin::Def => match (&*self.macro_def, self.def.as_ref()) { |
390 | &self.macro_def.1, | 390 | (db::TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt)) |
391 | self.def | 391 | | (db::TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt)) => { |
392 | .as_ref() | 392 | (def_site_token_map, tt.as_ref().map(|tt| tt.syntax().clone())) |
393 | .expect("`Origin::Def` used with non-`macro_rules!` macro") | 393 | } |
394 | .as_ref() | 394 | _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), |
395 | .map(|tt| tt.syntax().clone()), | 395 | }, |
396 | ), | ||
397 | }; | 396 | }; |
398 | 397 | ||
399 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 398 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; |
diff --git a/crates/hir_ty/src/db.rs b/crates/hir_ty/src/db.rs index cf67d4266..9da0a02e3 100644 --- a/crates/hir_ty/src/db.rs +++ b/crates/hir_ty/src/db.rs | |||
@@ -70,6 +70,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { | |||
70 | fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>; | 70 | fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>; |
71 | 71 | ||
72 | #[salsa::invoke(crate::lower::generic_defaults_query)] | 72 | #[salsa::invoke(crate::lower::generic_defaults_query)] |
73 | #[salsa::cycle(crate::lower::generic_defaults_recover)] | ||
73 | fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; | 74 | fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; |
74 | 75 | ||
75 | #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] | 76 | #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] |
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs index 7fd46becd..c99dd8d0a 100644 --- a/crates/hir_ty/src/lower.rs +++ b/crates/hir_ty/src/lower.rs | |||
@@ -414,17 +414,16 @@ impl<'a> TyLoweringContext<'a> { | |||
414 | self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty); | 414 | self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty); |
415 | let ty = if remaining_segments.len() == 1 { | 415 | let ty = if remaining_segments.len() == 1 { |
416 | let segment = remaining_segments.first().unwrap(); | 416 | let segment = remaining_segments.first().unwrap(); |
417 | let found = associated_type_by_name_including_super_traits( | 417 | let found = self |
418 | self.db, | 418 | .db |
419 | trait_ref, | 419 | .trait_data(trait_ref.hir_trait_id()) |
420 | &segment.name, | 420 | .associated_type_by_name(&segment.name); |
421 | ); | ||
422 | match found { | 421 | match found { |
423 | Some((super_trait_ref, associated_ty)) => { | 422 | Some(associated_ty) => { |
424 | // FIXME handle type parameters on the segment | 423 | // FIXME handle type parameters on the segment |
425 | TyKind::Alias(AliasTy::Projection(ProjectionTy { | 424 | TyKind::Alias(AliasTy::Projection(ProjectionTy { |
426 | associated_ty_id: to_assoc_type_id(associated_ty), | 425 | associated_ty_id: to_assoc_type_id(associated_ty), |
427 | substitution: super_trait_ref.substitution, | 426 | substitution: trait_ref.substitution, |
428 | })) | 427 | })) |
429 | .intern(&Interner) | 428 | .intern(&Interner) |
430 | } | 429 | } |
@@ -1089,6 +1088,27 @@ pub(crate) fn generic_defaults_query( | |||
1089 | defaults | 1088 | defaults |
1090 | } | 1089 | } |
1091 | 1090 | ||
1091 | pub(crate) fn generic_defaults_recover( | ||
1092 | db: &dyn HirDatabase, | ||
1093 | _cycle: &[String], | ||
1094 | def: &GenericDefId, | ||
1095 | ) -> Arc<[Binders<Ty>]> { | ||
1096 | let generic_params = generics(db.upcast(), *def); | ||
1097 | |||
1098 | // we still need one default per parameter | ||
1099 | let defaults = generic_params | ||
1100 | .iter() | ||
1101 | .enumerate() | ||
1102 | .map(|(idx, _)| { | ||
1103 | let ty = TyKind::Error.intern(&Interner); | ||
1104 | |||
1105 | crate::make_only_type_binders(idx, ty) | ||
1106 | }) | ||
1107 | .collect(); | ||
1108 | |||
1109 | defaults | ||
1110 | } | ||
1111 | |||
1092 | fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { | 1112 | fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { |
1093 | let data = db.function_data(def); | 1113 | let data = db.function_data(def); |
1094 | let resolver = def.resolver(db.upcast()); | 1114 | let resolver = def.resolver(db.upcast()); |
diff --git a/crates/hir_ty/src/tests/regression.rs b/crates/hir_ty/src/tests/regression.rs index 9cd9f473d..d14f5c9bb 100644 --- a/crates/hir_ty/src/tests/regression.rs +++ b/crates/hir_ty/src/tests/regression.rs | |||
@@ -1012,3 +1012,41 @@ fn lifetime_from_chalk_during_deref() { | |||
1012 | "#, | 1012 | "#, |
1013 | ) | 1013 | ) |
1014 | } | 1014 | } |
1015 | |||
1016 | #[test] | ||
1017 | fn issue_8686() { | ||
1018 | check_infer( | ||
1019 | r#" | ||
1020 | pub trait Try: FromResidual { | ||
1021 | type Output; | ||
1022 | type Residual; | ||
1023 | } | ||
1024 | pub trait FromResidual<R = <Self as Try>::Residual> { | ||
1025 | fn from_residual(residual: R) -> Self; | ||
1026 | } | ||
1027 | |||
1028 | struct ControlFlow<B, C>; | ||
1029 | impl<B, C> Try for ControlFlow<B, C> { | ||
1030 | type Output = C; | ||
1031 | type Residual = ControlFlow<B, !>; | ||
1032 | } | ||
1033 | impl<B, C> FromResidual for ControlFlow<B, C> { | ||
1034 | fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow } | ||
1035 | } | ||
1036 | |||
1037 | fn test() { | ||
1038 | ControlFlow::from_residual(ControlFlow::<u32, !>); | ||
1039 | } | ||
1040 | "#, | ||
1041 | expect![[r#" | ||
1042 | 144..152 'residual': R | ||
1043 | 365..366 'r': ControlFlow<B, !> | ||
1044 | 395..410 '{ ControlFlow }': ControlFlow<B, C> | ||
1045 | 397..408 'ControlFlow': ControlFlow<B, C> | ||
1046 | 424..482 '{ ...!>); }': () | ||
1047 | 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}> | ||
1048 | 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}> | ||
1049 | 457..478 'Contro...32, !>': ControlFlow<u32, !> | ||
1050 | "#]], | ||
1051 | ); | ||
1052 | } | ||
diff --git a/crates/hir_ty/src/utils.rs b/crates/hir_ty/src/utils.rs index 2f04ee57a..2f490fb92 100644 --- a/crates/hir_ty/src/utils.rs +++ b/crates/hir_ty/src/utils.rs | |||
@@ -1,6 +1,8 @@ | |||
1 | //! Helper functions for working with def, which don't need to be a separate | 1 | //! Helper functions for working with def, which don't need to be a separate |
2 | //! query, but can't be computed directly from `*Data` (ie, which need a `db`). | 2 | //! query, but can't be computed directly from `*Data` (ie, which need a `db`). |
3 | 3 | ||
4 | use std::iter; | ||
5 | |||
4 | use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex}; | 6 | use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex}; |
5 | use hir_def::{ | 7 | use hir_def::{ |
6 | db::DefDatabase, | 8 | db::DefDatabase, |
@@ -14,8 +16,12 @@ use hir_def::{ | |||
14 | AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId, | 16 | AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId, |
15 | }; | 17 | }; |
16 | use hir_expand::name::{name, Name}; | 18 | use hir_expand::name::{name, Name}; |
19 | use rustc_hash::FxHashSet; | ||
17 | 20 | ||
18 | use crate::{db::HirDatabase, Interner, Substitution, TraitRef, TraitRefExt, TyKind, WhereClause}; | 21 | use crate::{ |
22 | db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, TyKind, | ||
23 | WhereClause, | ||
24 | }; | ||
19 | 25 | ||
20 | fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> { | 26 | fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> { |
21 | let resolver = trait_.resolver(db); | 27 | let resolver = trait_.resolver(db); |
@@ -102,25 +108,35 @@ pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> { | |||
102 | /// `all_super_traits` is that we keep track of type parameters; for example if | 108 | /// `all_super_traits` is that we keep track of type parameters; for example if |
103 | /// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get | 109 | /// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get |
104 | /// `Self: OtherTrait<i32>`. | 110 | /// `Self: OtherTrait<i32>`. |
105 | pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> Vec<TraitRef> { | 111 | pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits { |
106 | // FIXME: replace by Chalk's `super_traits`, maybe make this a query | 112 | SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] } |
113 | } | ||
107 | 114 | ||
108 | // we need to take care a bit here to avoid infinite loops in case of cycles | 115 | pub(super) struct SuperTraits<'a> { |
109 | // (i.e. if we have `trait A: B; trait B: A;`) | 116 | db: &'a dyn HirDatabase, |
110 | let mut result = vec![trait_ref]; | 117 | stack: Vec<TraitRef>, |
111 | let mut i = 0; | 118 | seen: FxHashSet<ChalkTraitId>, |
112 | while i < result.len() { | 119 | } |
113 | let t = &result[i]; | 120 | |
114 | // yeah this is quadratic, but trait hierarchies should be flat | 121 | impl<'a> SuperTraits<'a> { |
115 | // enough that this doesn't matter | 122 | fn elaborate(&mut self, trait_ref: &TraitRef) { |
116 | for tt in direct_super_trait_refs(db, t) { | 123 | let mut trait_refs = direct_super_trait_refs(self.db, trait_ref); |
117 | if !result.iter().any(|tr| tr.trait_id == tt.trait_id) { | 124 | trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id)); |
118 | result.push(tt); | 125 | self.stack.extend(trait_refs); |
119 | } | 126 | } |
127 | } | ||
128 | |||
129 | impl<'a> Iterator for SuperTraits<'a> { | ||
130 | type Item = TraitRef; | ||
131 | |||
132 | fn next(&mut self) -> Option<Self::Item> { | ||
133 | if let Some(next) = self.stack.pop() { | ||
134 | self.elaborate(&next); | ||
135 | Some(next) | ||
136 | } else { | ||
137 | None | ||
120 | } | 138 | } |
121 | i += 1; | ||
122 | } | 139 | } |
123 | result | ||
124 | } | 140 | } |
125 | 141 | ||
126 | pub(super) fn associated_type_by_name_including_super_traits( | 142 | pub(super) fn associated_type_by_name_including_super_traits( |
@@ -128,7 +144,7 @@ pub(super) fn associated_type_by_name_including_super_traits( | |||
128 | trait_ref: TraitRef, | 144 | trait_ref: TraitRef, |
129 | name: &Name, | 145 | name: &Name, |
130 | ) -> Option<(TraitRef, TypeAliasId)> { | 146 | ) -> Option<(TraitRef, TypeAliasId)> { |
131 | all_super_trait_refs(db, trait_ref).into_iter().find_map(|t| { | 147 | all_super_trait_refs(db, trait_ref).find_map(|t| { |
132 | let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?; | 148 | let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?; |
133 | Some((t, assoc_type)) | 149 | Some((t, assoc_type)) |
134 | }) | 150 | }) |
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs index 1c911a8b2..b14f908b7 100644 --- a/crates/ide/src/diagnostics.rs +++ b/crates/ide/src/diagnostics.rs | |||
@@ -15,6 +15,7 @@ use hir::{ | |||
15 | diagnostics::{Diagnostic as _, DiagnosticCode, DiagnosticSinkBuilder}, | 15 | diagnostics::{Diagnostic as _, DiagnosticCode, DiagnosticSinkBuilder}, |
16 | InFile, Semantics, | 16 | InFile, Semantics, |
17 | }; | 17 | }; |
18 | use ide_assists::AssistResolveStrategy; | ||
18 | use ide_db::{base_db::SourceDatabase, RootDatabase}; | 19 | use ide_db::{base_db::SourceDatabase, RootDatabase}; |
19 | use itertools::Itertools; | 20 | use itertools::Itertools; |
20 | use rustc_hash::FxHashSet; | 21 | use rustc_hash::FxHashSet; |
@@ -84,7 +85,7 @@ pub struct DiagnosticsConfig { | |||
84 | pub(crate) fn diagnostics( | 85 | pub(crate) fn diagnostics( |
85 | db: &RootDatabase, | 86 | db: &RootDatabase, |
86 | config: &DiagnosticsConfig, | 87 | config: &DiagnosticsConfig, |
87 | resolve: bool, | 88 | resolve: &AssistResolveStrategy, |
88 | file_id: FileId, | 89 | file_id: FileId, |
89 | ) -> Vec<Diagnostic> { | 90 | ) -> Vec<Diagnostic> { |
90 | let _p = profile::span("diagnostics"); | 91 | let _p = profile::span("diagnostics"); |
@@ -212,7 +213,7 @@ pub(crate) fn diagnostics( | |||
212 | fn diagnostic_with_fix<D: DiagnosticWithFix>( | 213 | fn diagnostic_with_fix<D: DiagnosticWithFix>( |
213 | d: &D, | 214 | d: &D, |
214 | sema: &Semantics<RootDatabase>, | 215 | sema: &Semantics<RootDatabase>, |
215 | resolve: bool, | 216 | resolve: &AssistResolveStrategy, |
216 | ) -> Diagnostic { | 217 | ) -> Diagnostic { |
217 | Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message()) | 218 | Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message()) |
218 | .with_fix(d.fix(&sema, resolve)) | 219 | .with_fix(d.fix(&sema, resolve)) |
@@ -222,7 +223,7 @@ fn diagnostic_with_fix<D: DiagnosticWithFix>( | |||
222 | fn warning_with_fix<D: DiagnosticWithFix>( | 223 | fn warning_with_fix<D: DiagnosticWithFix>( |
223 | d: &D, | 224 | d: &D, |
224 | sema: &Semantics<RootDatabase>, | 225 | sema: &Semantics<RootDatabase>, |
225 | resolve: bool, | 226 | resolve: &AssistResolveStrategy, |
226 | ) -> Diagnostic { | 227 | ) -> Diagnostic { |
227 | Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message()) | 228 | Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message()) |
228 | .with_fix(d.fix(&sema, resolve)) | 229 | .with_fix(d.fix(&sema, resolve)) |
@@ -299,6 +300,7 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist { | |||
299 | #[cfg(test)] | 300 | #[cfg(test)] |
300 | mod tests { | 301 | mod tests { |
301 | use expect_test::{expect, Expect}; | 302 | use expect_test::{expect, Expect}; |
303 | use ide_assists::AssistResolveStrategy; | ||
302 | use stdx::trim_indent; | 304 | use stdx::trim_indent; |
303 | use test_utils::assert_eq_text; | 305 | use test_utils::assert_eq_text; |
304 | 306 | ||
@@ -314,7 +316,11 @@ mod tests { | |||
314 | 316 | ||
315 | let (analysis, file_position) = fixture::position(ra_fixture_before); | 317 | let (analysis, file_position) = fixture::position(ra_fixture_before); |
316 | let diagnostic = analysis | 318 | let diagnostic = analysis |
317 | .diagnostics(&DiagnosticsConfig::default(), true, file_position.file_id) | 319 | .diagnostics( |
320 | &DiagnosticsConfig::default(), | ||
321 | AssistResolveStrategy::All, | ||
322 | file_position.file_id, | ||
323 | ) | ||
318 | .unwrap() | 324 | .unwrap() |
319 | .pop() | 325 | .pop() |
320 | .unwrap(); | 326 | .unwrap(); |
@@ -343,7 +349,11 @@ mod tests { | |||
343 | fn check_no_fix(ra_fixture: &str) { | 349 | fn check_no_fix(ra_fixture: &str) { |
344 | let (analysis, file_position) = fixture::position(ra_fixture); | 350 | let (analysis, file_position) = fixture::position(ra_fixture); |
345 | let diagnostic = analysis | 351 | let diagnostic = analysis |
346 | .diagnostics(&DiagnosticsConfig::default(), true, file_position.file_id) | 352 | .diagnostics( |
353 | &DiagnosticsConfig::default(), | ||
354 | AssistResolveStrategy::All, | ||
355 | file_position.file_id, | ||
356 | ) | ||
347 | .unwrap() | 357 | .unwrap() |
348 | .pop() | 358 | .pop() |
349 | .unwrap(); | 359 | .unwrap(); |
@@ -357,7 +367,9 @@ mod tests { | |||
357 | let diagnostics = files | 367 | let diagnostics = files |
358 | .into_iter() | 368 | .into_iter() |
359 | .flat_map(|file_id| { | 369 | .flat_map(|file_id| { |
360 | analysis.diagnostics(&DiagnosticsConfig::default(), true, file_id).unwrap() | 370 | analysis |
371 | .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id) | ||
372 | .unwrap() | ||
361 | }) | 373 | }) |
362 | .collect::<Vec<_>>(); | 374 | .collect::<Vec<_>>(); |
363 | assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); | 375 | assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); |
@@ -365,8 +377,9 @@ mod tests { | |||
365 | 377 | ||
366 | fn check_expect(ra_fixture: &str, expect: Expect) { | 378 | fn check_expect(ra_fixture: &str, expect: Expect) { |
367 | let (analysis, file_id) = fixture::file(ra_fixture); | 379 | let (analysis, file_id) = fixture::file(ra_fixture); |
368 | let diagnostics = | 380 | let diagnostics = analysis |
369 | analysis.diagnostics(&DiagnosticsConfig::default(), true, file_id).unwrap(); | 381 | .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id) |
382 | .unwrap(); | ||
370 | expect.assert_debug_eq(&diagnostics) | 383 | expect.assert_debug_eq(&diagnostics) |
371 | } | 384 | } |
372 | 385 | ||
@@ -911,11 +924,13 @@ struct Foo { | |||
911 | 924 | ||
912 | let (analysis, file_id) = fixture::file(r#"mod foo;"#); | 925 | let (analysis, file_id) = fixture::file(r#"mod foo;"#); |
913 | 926 | ||
914 | let diagnostics = analysis.diagnostics(&config, true, file_id).unwrap(); | 927 | let diagnostics = |
928 | analysis.diagnostics(&config, AssistResolveStrategy::All, file_id).unwrap(); | ||
915 | assert!(diagnostics.is_empty()); | 929 | assert!(diagnostics.is_empty()); |
916 | 930 | ||
917 | let diagnostics = | 931 | let diagnostics = analysis |
918 | analysis.diagnostics(&DiagnosticsConfig::default(), true, file_id).unwrap(); | 932 | .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id) |
933 | .unwrap(); | ||
919 | assert!(!diagnostics.is_empty()); | 934 | assert!(!diagnostics.is_empty()); |
920 | } | 935 | } |
921 | 936 | ||
@@ -1022,7 +1037,11 @@ impl TestStruct { | |||
1022 | 1037 | ||
1023 | let (analysis, file_position) = fixture::position(input); | 1038 | let (analysis, file_position) = fixture::position(input); |
1024 | let diagnostics = analysis | 1039 | let diagnostics = analysis |
1025 | .diagnostics(&DiagnosticsConfig::default(), true, file_position.file_id) | 1040 | .diagnostics( |
1041 | &DiagnosticsConfig::default(), | ||
1042 | AssistResolveStrategy::All, | ||
1043 | file_position.file_id, | ||
1044 | ) | ||
1026 | .unwrap(); | 1045 | .unwrap(); |
1027 | assert_eq!(diagnostics.len(), 1); | 1046 | assert_eq!(diagnostics.len(), 1); |
1028 | 1047 | ||
diff --git a/crates/ide/src/diagnostics/fixes.rs b/crates/ide/src/diagnostics/fixes.rs index 7be8b3459..15821500f 100644 --- a/crates/ide/src/diagnostics/fixes.rs +++ b/crates/ide/src/diagnostics/fixes.rs | |||
@@ -8,6 +8,7 @@ use hir::{ | |||
8 | }, | 8 | }, |
9 | HasSource, HirDisplay, InFile, Semantics, VariantDef, | 9 | HasSource, HirDisplay, InFile, Semantics, VariantDef, |
10 | }; | 10 | }; |
11 | use ide_assists::AssistResolveStrategy; | ||
11 | use ide_db::{ | 12 | use ide_db::{ |
12 | base_db::{AnchoredPathBuf, FileId}, | 13 | base_db::{AnchoredPathBuf, FileId}, |
13 | source_change::{FileSystemEdit, SourceChange}, | 14 | source_change::{FileSystemEdit, SourceChange}, |
@@ -35,11 +36,19 @@ pub(crate) trait DiagnosticWithFix: Diagnostic { | |||
35 | /// | 36 | /// |
36 | /// If `resolve` is false, the edit will be computed later, on demand, and | 37 | /// If `resolve` is false, the edit will be computed later, on demand, and |
37 | /// can be omitted. | 38 | /// can be omitted. |
38 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist>; | 39 | fn fix( |
40 | &self, | ||
41 | sema: &Semantics<RootDatabase>, | ||
42 | _resolve: &AssistResolveStrategy, | ||
43 | ) -> Option<Assist>; | ||
39 | } | 44 | } |
40 | 45 | ||
41 | impl DiagnosticWithFix for UnresolvedModule { | 46 | impl DiagnosticWithFix for UnresolvedModule { |
42 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 47 | fn fix( |
48 | &self, | ||
49 | sema: &Semantics<RootDatabase>, | ||
50 | _resolve: &AssistResolveStrategy, | ||
51 | ) -> Option<Assist> { | ||
43 | let root = sema.db.parse_or_expand(self.file)?; | 52 | let root = sema.db.parse_or_expand(self.file)?; |
44 | let unresolved_module = self.decl.to_node(&root); | 53 | let unresolved_module = self.decl.to_node(&root); |
45 | Some(fix( | 54 | Some(fix( |
@@ -59,7 +68,11 @@ impl DiagnosticWithFix for UnresolvedModule { | |||
59 | } | 68 | } |
60 | 69 | ||
61 | impl DiagnosticWithFix for NoSuchField { | 70 | impl DiagnosticWithFix for NoSuchField { |
62 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 71 | fn fix( |
72 | &self, | ||
73 | sema: &Semantics<RootDatabase>, | ||
74 | _resolve: &AssistResolveStrategy, | ||
75 | ) -> Option<Assist> { | ||
63 | let root = sema.db.parse_or_expand(self.file)?; | 76 | let root = sema.db.parse_or_expand(self.file)?; |
64 | missing_record_expr_field_fix( | 77 | missing_record_expr_field_fix( |
65 | &sema, | 78 | &sema, |
@@ -70,7 +83,11 @@ impl DiagnosticWithFix for NoSuchField { | |||
70 | } | 83 | } |
71 | 84 | ||
72 | impl DiagnosticWithFix for MissingFields { | 85 | impl DiagnosticWithFix for MissingFields { |
73 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 86 | fn fix( |
87 | &self, | ||
88 | sema: &Semantics<RootDatabase>, | ||
89 | _resolve: &AssistResolveStrategy, | ||
90 | ) -> Option<Assist> { | ||
74 | // Note that although we could add a diagnostics to | 91 | // Note that although we could add a diagnostics to |
75 | // fill the missing tuple field, e.g : | 92 | // fill the missing tuple field, e.g : |
76 | // `struct A(usize);` | 93 | // `struct A(usize);` |
@@ -106,7 +123,11 @@ impl DiagnosticWithFix for MissingFields { | |||
106 | } | 123 | } |
107 | 124 | ||
108 | impl DiagnosticWithFix for MissingOkOrSomeInTailExpr { | 125 | impl DiagnosticWithFix for MissingOkOrSomeInTailExpr { |
109 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 126 | fn fix( |
127 | &self, | ||
128 | sema: &Semantics<RootDatabase>, | ||
129 | _resolve: &AssistResolveStrategy, | ||
130 | ) -> Option<Assist> { | ||
110 | let root = sema.db.parse_or_expand(self.file)?; | 131 | let root = sema.db.parse_or_expand(self.file)?; |
111 | let tail_expr = self.expr.to_node(&root); | 132 | let tail_expr = self.expr.to_node(&root); |
112 | let tail_expr_range = tail_expr.syntax().text_range(); | 133 | let tail_expr_range = tail_expr.syntax().text_range(); |
@@ -119,7 +140,11 @@ impl DiagnosticWithFix for MissingOkOrSomeInTailExpr { | |||
119 | } | 140 | } |
120 | 141 | ||
121 | impl DiagnosticWithFix for RemoveThisSemicolon { | 142 | impl DiagnosticWithFix for RemoveThisSemicolon { |
122 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 143 | fn fix( |
144 | &self, | ||
145 | sema: &Semantics<RootDatabase>, | ||
146 | _resolve: &AssistResolveStrategy, | ||
147 | ) -> Option<Assist> { | ||
123 | let root = sema.db.parse_or_expand(self.file)?; | 148 | let root = sema.db.parse_or_expand(self.file)?; |
124 | 149 | ||
125 | let semicolon = self | 150 | let semicolon = self |
@@ -139,7 +164,11 @@ impl DiagnosticWithFix for RemoveThisSemicolon { | |||
139 | } | 164 | } |
140 | 165 | ||
141 | impl DiagnosticWithFix for IncorrectCase { | 166 | impl DiagnosticWithFix for IncorrectCase { |
142 | fn fix(&self, sema: &Semantics<RootDatabase>, resolve: bool) -> Option<Assist> { | 167 | fn fix( |
168 | &self, | ||
169 | sema: &Semantics<RootDatabase>, | ||
170 | resolve: &AssistResolveStrategy, | ||
171 | ) -> Option<Assist> { | ||
143 | let root = sema.db.parse_or_expand(self.file)?; | 172 | let root = sema.db.parse_or_expand(self.file)?; |
144 | let name_node = self.ident.to_node(&root); | 173 | let name_node = self.ident.to_node(&root); |
145 | 174 | ||
@@ -149,7 +178,7 @@ impl DiagnosticWithFix for IncorrectCase { | |||
149 | 178 | ||
150 | let label = format!("Rename to {}", self.suggested_text); | 179 | let label = format!("Rename to {}", self.suggested_text); |
151 | let mut res = unresolved_fix("change_case", &label, frange.range); | 180 | let mut res = unresolved_fix("change_case", &label, frange.range); |
152 | if resolve { | 181 | if resolve.should_resolve(&res.id) { |
153 | let source_change = rename_with_semantics(sema, file_position, &self.suggested_text); | 182 | let source_change = rename_with_semantics(sema, file_position, &self.suggested_text); |
154 | res.source_change = Some(source_change.ok().unwrap_or_default()); | 183 | res.source_change = Some(source_change.ok().unwrap_or_default()); |
155 | } | 184 | } |
@@ -159,7 +188,11 @@ impl DiagnosticWithFix for IncorrectCase { | |||
159 | } | 188 | } |
160 | 189 | ||
161 | impl DiagnosticWithFix for ReplaceFilterMapNextWithFindMap { | 190 | impl DiagnosticWithFix for ReplaceFilterMapNextWithFindMap { |
162 | fn fix(&self, sema: &Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 191 | fn fix( |
192 | &self, | ||
193 | sema: &Semantics<RootDatabase>, | ||
194 | _resolve: &AssistResolveStrategy, | ||
195 | ) -> Option<Assist> { | ||
163 | let root = sema.db.parse_or_expand(self.file)?; | 196 | let root = sema.db.parse_or_expand(self.file)?; |
164 | let next_expr = self.next_expr.to_node(&root); | 197 | let next_expr = self.next_expr.to_node(&root); |
165 | let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?; | 198 | let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?; |
diff --git a/crates/ide/src/diagnostics/unlinked_file.rs b/crates/ide/src/diagnostics/unlinked_file.rs index 7d39f4fbe..93fd25dea 100644 --- a/crates/ide/src/diagnostics/unlinked_file.rs +++ b/crates/ide/src/diagnostics/unlinked_file.rs | |||
@@ -5,6 +5,7 @@ use hir::{ | |||
5 | diagnostics::{Diagnostic, DiagnosticCode}, | 5 | diagnostics::{Diagnostic, DiagnosticCode}, |
6 | InFile, | 6 | InFile, |
7 | }; | 7 | }; |
8 | use ide_assists::AssistResolveStrategy; | ||
8 | use ide_db::{ | 9 | use ide_db::{ |
9 | base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, | 10 | base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, |
10 | source_change::SourceChange, | 11 | source_change::SourceChange, |
@@ -50,7 +51,11 @@ impl Diagnostic for UnlinkedFile { | |||
50 | } | 51 | } |
51 | 52 | ||
52 | impl DiagnosticWithFix for UnlinkedFile { | 53 | impl DiagnosticWithFix for UnlinkedFile { |
53 | fn fix(&self, sema: &hir::Semantics<RootDatabase>, _resolve: bool) -> Option<Assist> { | 54 | fn fix( |
55 | &self, | ||
56 | sema: &hir::Semantics<RootDatabase>, | ||
57 | _resolve: &AssistResolveStrategy, | ||
58 | ) -> Option<Assist> { | ||
54 | // If there's an existing module that could add a `mod` item to include the unlinked file, | 59 | // If there's an existing module that could add a `mod` item to include the unlinked file, |
55 | // suggest that as a fix. | 60 | // suggest that as a fix. |
56 | 61 | ||
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index d5ef054d8..e0bf660c4 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs | |||
@@ -218,9 +218,7 @@ fn hint_iterator( | |||
218 | ty: &hir::Type, | 218 | ty: &hir::Type, |
219 | ) -> Option<SmolStr> { | 219 | ) -> Option<SmolStr> { |
220 | let db = sema.db; | 220 | let db = sema.db; |
221 | let strukt = std::iter::successors(Some(ty.clone()), |ty| ty.remove_ref()) | 221 | let strukt = ty.strip_references().as_adt()?; |
222 | .last() | ||
223 | .and_then(|strukt| strukt.as_adt())?; | ||
224 | let krate = strukt.krate(db); | 222 | let krate = strukt.krate(db); |
225 | if krate != famous_defs.core()? { | 223 | if krate != famous_defs.core()? { |
226 | return None; | 224 | return None; |
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 99e45633e..8e5b72044 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs | |||
@@ -87,7 +87,9 @@ pub use crate::{ | |||
87 | }, | 87 | }, |
88 | }; | 88 | }; |
89 | pub use hir::{Documentation, Semantics}; | 89 | pub use hir::{Documentation, Semantics}; |
90 | pub use ide_assists::{Assist, AssistConfig, AssistId, AssistKind}; | 90 | pub use ide_assists::{ |
91 | Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve, | ||
92 | }; | ||
91 | pub use ide_completion::{ | 93 | pub use ide_completion::{ |
92 | CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit, | 94 | CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, ImportEdit, |
93 | InsertTextFormat, | 95 | InsertTextFormat, |
@@ -518,12 +520,13 @@ impl Analysis { | |||
518 | pub fn assists( | 520 | pub fn assists( |
519 | &self, | 521 | &self, |
520 | config: &AssistConfig, | 522 | config: &AssistConfig, |
521 | resolve: bool, | 523 | resolve: AssistResolveStrategy, |
522 | frange: FileRange, | 524 | frange: FileRange, |
523 | ) -> Cancelable<Vec<Assist>> { | 525 | ) -> Cancelable<Vec<Assist>> { |
524 | self.with_db(|db| { | 526 | self.with_db(|db| { |
527 | let ssr_assists = ssr::ssr_assists(db, &resolve, frange); | ||
525 | let mut acc = Assist::get(db, config, resolve, frange); | 528 | let mut acc = Assist::get(db, config, resolve, frange); |
526 | ssr::add_ssr_assist(db, &mut acc, resolve, frange); | 529 | acc.extend(ssr_assists.into_iter()); |
527 | acc | 530 | acc |
528 | }) | 531 | }) |
529 | } | 532 | } |
@@ -532,10 +535,10 @@ impl Analysis { | |||
532 | pub fn diagnostics( | 535 | pub fn diagnostics( |
533 | &self, | 536 | &self, |
534 | config: &DiagnosticsConfig, | 537 | config: &DiagnosticsConfig, |
535 | resolve: bool, | 538 | resolve: AssistResolveStrategy, |
536 | file_id: FileId, | 539 | file_id: FileId, |
537 | ) -> Cancelable<Vec<Diagnostic>> { | 540 | ) -> Cancelable<Vec<Diagnostic>> { |
538 | self.with_db(|db| diagnostics::diagnostics(db, config, resolve, file_id)) | 541 | self.with_db(|db| diagnostics::diagnostics(db, config, &resolve, file_id)) |
539 | } | 542 | } |
540 | 543 | ||
541 | /// Convenience function to return assists + quick fixes for diagnostics | 544 | /// Convenience function to return assists + quick fixes for diagnostics |
@@ -543,7 +546,7 @@ impl Analysis { | |||
543 | &self, | 546 | &self, |
544 | assist_config: &AssistConfig, | 547 | assist_config: &AssistConfig, |
545 | diagnostics_config: &DiagnosticsConfig, | 548 | diagnostics_config: &DiagnosticsConfig, |
546 | resolve: bool, | 549 | resolve: AssistResolveStrategy, |
547 | frange: FileRange, | 550 | frange: FileRange, |
548 | ) -> Cancelable<Vec<Assist>> { | 551 | ) -> Cancelable<Vec<Assist>> { |
549 | let include_fixes = match &assist_config.allowed { | 552 | let include_fixes = match &assist_config.allowed { |
@@ -552,17 +555,21 @@ impl Analysis { | |||
552 | }; | 555 | }; |
553 | 556 | ||
554 | self.with_db(|db| { | 557 | self.with_db(|db| { |
558 | let ssr_assists = ssr::ssr_assists(db, &resolve, frange); | ||
559 | let diagnostic_assists = if include_fixes { | ||
560 | diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id) | ||
561 | .into_iter() | ||
562 | .filter_map(|it| it.fix) | ||
563 | .filter(|it| it.target.intersect(frange.range).is_some()) | ||
564 | .collect() | ||
565 | } else { | ||
566 | Vec::new() | ||
567 | }; | ||
568 | |||
555 | let mut res = Assist::get(db, assist_config, resolve, frange); | 569 | let mut res = Assist::get(db, assist_config, resolve, frange); |
556 | ssr::add_ssr_assist(db, &mut res, resolve, frange); | 570 | res.extend(ssr_assists.into_iter()); |
557 | 571 | res.extend(diagnostic_assists.into_iter()); | |
558 | if include_fixes { | 572 | |
559 | res.extend( | ||
560 | diagnostics::diagnostics(db, diagnostics_config, resolve, frange.file_id) | ||
561 | .into_iter() | ||
562 | .filter_map(|it| it.fix) | ||
563 | .filter(|it| it.target.intersect(frange.range).is_some()), | ||
564 | ); | ||
565 | } | ||
566 | res | 573 | res |
567 | }) | 574 | }) |
568 | } | 575 | } |
diff --git a/crates/ide/src/prime_caches.rs b/crates/ide/src/prime_caches.rs index 03597f507..d912a01b8 100644 --- a/crates/ide/src/prime_caches.rs +++ b/crates/ide/src/prime_caches.rs | |||
@@ -27,6 +27,7 @@ pub(crate) fn prime_caches(db: &RootDatabase, cb: &(dyn Fn(PrimeCachesProgress) | |||
27 | let topo = &graph.crates_in_topological_order(); | 27 | let topo = &graph.crates_in_topological_order(); |
28 | 28 | ||
29 | cb(PrimeCachesProgress::Started); | 29 | cb(PrimeCachesProgress::Started); |
30 | // Take care to emit the finish signal even when the computation is canceled. | ||
30 | let _d = stdx::defer(|| cb(PrimeCachesProgress::Finished)); | 31 | let _d = stdx::defer(|| cb(PrimeCachesProgress::Finished)); |
31 | 32 | ||
32 | // FIXME: This would be easy to parallelize, since it's in the ideal ordering for that. | 33 | // FIXME: This would be easy to parallelize, since it's in the ideal ordering for that. |
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 3eb9e27ee..f76715d84 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs | |||
@@ -304,11 +304,11 @@ fn module_def_doctest(sema: &Semantics<RootDatabase>, def: hir::ModuleDef) -> Op | |||
304 | let name = adt.name(sema.db); | 304 | let name = adt.name(sema.db); |
305 | let idx = path.rfind(':').map_or(0, |idx| idx + 1); | 305 | let idx = path.rfind(':').map_or(0, |idx| idx + 1); |
306 | let (prefix, suffix) = path.split_at(idx); | 306 | let (prefix, suffix) = path.split_at(idx); |
307 | let mut ty_params = ty.type_parameters().peekable(); | 307 | let mut ty_args = ty.type_arguments().peekable(); |
308 | let params = if ty_params.peek().is_some() { | 308 | let params = if ty_args.peek().is_some() { |
309 | format!( | 309 | format!( |
310 | "<{}>", | 310 | "<{}>", |
311 | ty_params.format_with(", ", |ty, cb| cb(&ty.display(sema.db))) | 311 | ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db))) |
312 | ) | 312 | ) |
313 | } else { | 313 | } else { |
314 | String::new() | 314 | String::new() |
diff --git a/crates/ide/src/ssr.rs b/crates/ide/src/ssr.rs index f3638d928..57ec80261 100644 --- a/crates/ide/src/ssr.rs +++ b/crates/ide/src/ssr.rs | |||
@@ -2,18 +2,23 @@ | |||
2 | //! assist in ide_assists because that would require the ide_assists crate | 2 | //! assist in ide_assists because that would require the ide_assists crate |
3 | //! depend on the ide_ssr crate. | 3 | //! depend on the ide_ssr crate. |
4 | 4 | ||
5 | use ide_assists::{Assist, AssistId, AssistKind, GroupLabel}; | 5 | use ide_assists::{Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel}; |
6 | use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase}; | 6 | use ide_db::{base_db::FileRange, label::Label, source_change::SourceChange, RootDatabase}; |
7 | 7 | ||
8 | pub(crate) fn add_ssr_assist( | 8 | pub(crate) fn ssr_assists( |
9 | db: &RootDatabase, | 9 | db: &RootDatabase, |
10 | base: &mut Vec<Assist>, | 10 | resolve: &AssistResolveStrategy, |
11 | resolve: bool, | ||
12 | frange: FileRange, | 11 | frange: FileRange, |
13 | ) -> Option<()> { | 12 | ) -> Vec<Assist> { |
14 | let (match_finder, comment_range) = ide_ssr::ssr_from_comment(db, frange)?; | 13 | let mut ssr_assists = Vec::with_capacity(2); |
15 | 14 | ||
16 | let (source_change_for_file, source_change_for_workspace) = if resolve { | 15 | let (match_finder, comment_range) = match ide_ssr::ssr_from_comment(db, frange) { |
16 | Some(ssr_data) => ssr_data, | ||
17 | None => return ssr_assists, | ||
18 | }; | ||
19 | let id = AssistId("ssr", AssistKind::RefactorRewrite); | ||
20 | |||
21 | let (source_change_for_file, source_change_for_workspace) = if resolve.should_resolve(&id) { | ||
17 | let edits = match_finder.edits(); | 22 | let edits = match_finder.edits(); |
18 | 23 | ||
19 | let source_change_for_file = { | 24 | let source_change_for_file = { |
@@ -35,16 +40,17 @@ pub(crate) fn add_ssr_assist( | |||
35 | 40 | ||
36 | for (label, source_change) in assists.into_iter() { | 41 | for (label, source_change) in assists.into_iter() { |
37 | let assist = Assist { | 42 | let assist = Assist { |
38 | id: AssistId("ssr", AssistKind::RefactorRewrite), | 43 | id, |
39 | label: Label::new(label), | 44 | label: Label::new(label), |
40 | group: Some(GroupLabel("Apply SSR".into())), | 45 | group: Some(GroupLabel("Apply SSR".into())), |
41 | target: comment_range, | 46 | target: comment_range, |
42 | source_change, | 47 | source_change, |
43 | }; | 48 | }; |
44 | 49 | ||
45 | base.push(assist); | 50 | ssr_assists.push(assist); |
46 | } | 51 | } |
47 | Some(()) | 52 | |
53 | ssr_assists | ||
48 | } | 54 | } |
49 | 55 | ||
50 | #[cfg(test)] | 56 | #[cfg(test)] |
@@ -52,7 +58,7 @@ mod tests { | |||
52 | use std::sync::Arc; | 58 | use std::sync::Arc; |
53 | 59 | ||
54 | use expect_test::expect; | 60 | use expect_test::expect; |
55 | use ide_assists::Assist; | 61 | use ide_assists::{Assist, AssistResolveStrategy}; |
56 | use ide_db::{ | 62 | use ide_db::{ |
57 | base_db::{fixture::WithFixture, salsa::Durability, FileRange}, | 63 | base_db::{fixture::WithFixture, salsa::Durability, FileRange}, |
58 | symbol_index::SymbolsDatabase, | 64 | symbol_index::SymbolsDatabase, |
@@ -60,24 +66,14 @@ mod tests { | |||
60 | }; | 66 | }; |
61 | use rustc_hash::FxHashSet; | 67 | use rustc_hash::FxHashSet; |
62 | 68 | ||
63 | use super::add_ssr_assist; | 69 | use super::ssr_assists; |
64 | 70 | ||
65 | fn get_assists(ra_fixture: &str, resolve: bool) -> Vec<Assist> { | 71 | fn get_assists(ra_fixture: &str, resolve: AssistResolveStrategy) -> Vec<Assist> { |
66 | let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); | 72 | let (mut db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture); |
67 | let mut local_roots = FxHashSet::default(); | 73 | let mut local_roots = FxHashSet::default(); |
68 | local_roots.insert(ide_db::base_db::fixture::WORKSPACE); | 74 | local_roots.insert(ide_db::base_db::fixture::WORKSPACE); |
69 | db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); | 75 | db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); |
70 | 76 | ssr_assists(&db, &resolve, FileRange { file_id, range: range_or_offset.into() }) | |
71 | let mut assists = vec![]; | ||
72 | |||
73 | add_ssr_assist( | ||
74 | &db, | ||
75 | &mut assists, | ||
76 | resolve, | ||
77 | FileRange { file_id, range: range_or_offset.into() }, | ||
78 | ); | ||
79 | |||
80 | assists | ||
81 | } | 77 | } |
82 | 78 | ||
83 | #[test] | 79 | #[test] |
@@ -88,16 +84,13 @@ mod tests { | |||
88 | // This is foo $0 | 84 | // This is foo $0 |
89 | fn foo() {} | 85 | fn foo() {} |
90 | "#; | 86 | "#; |
91 | let resolve = true; | 87 | let assists = get_assists(ra_fixture, AssistResolveStrategy::All); |
92 | |||
93 | let assists = get_assists(ra_fixture, resolve); | ||
94 | 88 | ||
95 | assert_eq!(0, assists.len()); | 89 | assert_eq!(0, assists.len()); |
96 | } | 90 | } |
97 | 91 | ||
98 | #[test] | 92 | #[test] |
99 | fn resolve_edits_true() { | 93 | fn resolve_edits_true() { |
100 | let resolve = true; | ||
101 | let assists = get_assists( | 94 | let assists = get_assists( |
102 | r#" | 95 | r#" |
103 | //- /lib.rs | 96 | //- /lib.rs |
@@ -109,7 +102,7 @@ mod tests { | |||
109 | //- /bar.rs | 102 | //- /bar.rs |
110 | fn bar() { 2 } | 103 | fn bar() { 2 } |
111 | "#, | 104 | "#, |
112 | resolve, | 105 | AssistResolveStrategy::All, |
113 | ); | 106 | ); |
114 | 107 | ||
115 | assert_eq!(2, assists.len()); | 108 | assert_eq!(2, assists.len()); |
@@ -200,7 +193,6 @@ mod tests { | |||
200 | 193 | ||
201 | #[test] | 194 | #[test] |
202 | fn resolve_edits_false() { | 195 | fn resolve_edits_false() { |
203 | let resolve = false; | ||
204 | let assists = get_assists( | 196 | let assists = get_assists( |
205 | r#" | 197 | r#" |
206 | //- /lib.rs | 198 | //- /lib.rs |
@@ -212,7 +204,7 @@ mod tests { | |||
212 | //- /bar.rs | 204 | //- /bar.rs |
213 | fn bar() { 2 } | 205 | fn bar() { 2 } |
214 | "#, | 206 | "#, |
215 | resolve, | 207 | AssistResolveStrategy::None, |
216 | ); | 208 | ); |
217 | 209 | ||
218 | assert_eq!(2, assists.len()); | 210 | assert_eq!(2, assists.len()); |
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html index 638f42c2f..8d83ba206 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html | |||
@@ -142,6 +142,7 @@ It is beyond me why you'd use these when you got /// | |||
142 | ```rust | 142 | ```rust |
143 | </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation"> | 143 | </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation"> |
144 | ``` | 144 | ``` |
145 | </span><span class="function documentation injected intra_doc_link">[`block_comments2`]</span><span class="comment documentation"> tests these with indentation | ||
145 | */</span> | 146 | */</span> |
146 | <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span> | 147 | <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span> |
147 | 148 | ||
@@ -150,5 +151,6 @@ It is beyond me why you'd use these when you got /// | |||
150 | ```rust | 151 | ```rust |
151 | </span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation"> | 152 | </span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation"> |
152 | ``` | 153 | ``` |
154 | </span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation | ||
153 | */</span> | 155 | */</span> |
154 | <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file | 156 | <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file |
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 17cc6334b..b6e952b08 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs | |||
@@ -618,6 +618,7 @@ It is beyond me why you'd use these when you got /// | |||
618 | ```rust | 618 | ```rust |
619 | let _ = example(&[1, 2, 3]); | 619 | let _ = example(&[1, 2, 3]); |
620 | ``` | 620 | ``` |
621 | [`block_comments2`] tests these with indentation | ||
621 | */ | 622 | */ |
622 | pub fn block_comments() {} | 623 | pub fn block_comments() {} |
623 | 624 | ||
@@ -626,6 +627,7 @@ pub fn block_comments() {} | |||
626 | ```rust | 627 | ```rust |
627 | let _ = example(&[1, 2, 3]); | 628 | let _ = example(&[1, 2, 3]); |
628 | ``` | 629 | ``` |
630 | [`block_comments`] tests these without indentation | ||
629 | */ | 631 | */ |
630 | pub fn block_comments2() {} | 632 | pub fn block_comments2() {} |
631 | "# | 633 | "# |
diff --git a/crates/ide_assists/src/assist_context.rs b/crates/ide_assists/src/assist_context.rs index 8714e4978..112939948 100644 --- a/crates/ide_assists/src/assist_context.rs +++ b/crates/ide_assists/src/assist_context.rs | |||
@@ -19,7 +19,9 @@ use syntax::{ | |||
19 | }; | 19 | }; |
20 | use text_edit::{TextEdit, TextEditBuilder}; | 20 | use text_edit::{TextEdit, TextEditBuilder}; |
21 | 21 | ||
22 | use crate::{assist_config::AssistConfig, Assist, AssistId, AssistKind, GroupLabel}; | 22 | use crate::{ |
23 | assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, | ||
24 | }; | ||
23 | 25 | ||
24 | /// `AssistContext` allows to apply an assist or check if it could be applied. | 26 | /// `AssistContext` allows to apply an assist or check if it could be applied. |
25 | /// | 27 | /// |
@@ -105,14 +107,14 @@ impl<'a> AssistContext<'a> { | |||
105 | } | 107 | } |
106 | 108 | ||
107 | pub(crate) struct Assists { | 109 | pub(crate) struct Assists { |
108 | resolve: bool, | ||
109 | file: FileId, | 110 | file: FileId, |
111 | resolve: AssistResolveStrategy, | ||
110 | buf: Vec<Assist>, | 112 | buf: Vec<Assist>, |
111 | allowed: Option<Vec<AssistKind>>, | 113 | allowed: Option<Vec<AssistKind>>, |
112 | } | 114 | } |
113 | 115 | ||
114 | impl Assists { | 116 | impl Assists { |
115 | pub(crate) fn new(ctx: &AssistContext, resolve: bool) -> Assists { | 117 | pub(crate) fn new(ctx: &AssistContext, resolve: AssistResolveStrategy) -> Assists { |
116 | Assists { | 118 | Assists { |
117 | resolve, | 119 | resolve, |
118 | file: ctx.frange.file_id, | 120 | file: ctx.frange.file_id, |
@@ -158,7 +160,7 @@ impl Assists { | |||
158 | } | 160 | } |
159 | 161 | ||
160 | fn add_impl(&mut self, mut assist: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> { | 162 | fn add_impl(&mut self, mut assist: Assist, f: impl FnOnce(&mut AssistBuilder)) -> Option<()> { |
161 | let source_change = if self.resolve { | 163 | let source_change = if self.resolve.should_resolve(&assist.id) { |
162 | let mut builder = AssistBuilder::new(self.file); | 164 | let mut builder = AssistBuilder::new(self.file); |
163 | f(&mut builder); | 165 | f(&mut builder); |
164 | Some(builder.finish()) | 166 | Some(builder.finish()) |
@@ -185,7 +187,29 @@ pub(crate) struct AssistBuilder { | |||
185 | source_change: SourceChange, | 187 | source_change: SourceChange, |
186 | 188 | ||
187 | /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin. | 189 | /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin. |
188 | mutated_tree: Option<(SyntaxNode, SyntaxNode)>, | 190 | mutated_tree: Option<TreeMutator>, |
191 | } | ||
192 | |||
193 | pub(crate) struct TreeMutator { | ||
194 | immutable: SyntaxNode, | ||
195 | mutable_clone: SyntaxNode, | ||
196 | } | ||
197 | |||
198 | impl TreeMutator { | ||
199 | pub(crate) fn new(immutable: &SyntaxNode) -> TreeMutator { | ||
200 | let immutable = immutable.ancestors().last().unwrap(); | ||
201 | let mutable_clone = immutable.clone_for_update(); | ||
202 | TreeMutator { immutable, mutable_clone } | ||
203 | } | ||
204 | |||
205 | pub(crate) fn make_mut<N: AstNode>(&self, node: &N) -> N { | ||
206 | N::cast(self.make_syntax_mut(node.syntax())).unwrap() | ||
207 | } | ||
208 | |||
209 | pub(crate) fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode { | ||
210 | let ptr = SyntaxNodePtr::new(node); | ||
211 | ptr.to_node(&self.mutable_clone) | ||
212 | } | ||
189 | } | 213 | } |
190 | 214 | ||
191 | impl AssistBuilder { | 215 | impl AssistBuilder { |
@@ -204,8 +228,8 @@ impl AssistBuilder { | |||
204 | } | 228 | } |
205 | 229 | ||
206 | fn commit(&mut self) { | 230 | fn commit(&mut self) { |
207 | if let Some((old, new)) = self.mutated_tree.take() { | 231 | if let Some(tm) = self.mutated_tree.take() { |
208 | algo::diff(&old, &new).into_text_edit(&mut self.edit) | 232 | algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit) |
209 | } | 233 | } |
210 | 234 | ||
211 | let edit = mem::take(&mut self.edit).finish(); | 235 | let edit = mem::take(&mut self.edit).finish(); |
@@ -228,16 +252,7 @@ impl AssistBuilder { | |||
228 | /// phase, and then get their mutable couterparts using `make_mut` in the | 252 | /// phase, and then get their mutable couterparts using `make_mut` in the |
229 | /// mutable state. | 253 | /// mutable state. |
230 | pub(crate) fn make_mut(&mut self, node: SyntaxNode) -> SyntaxNode { | 254 | pub(crate) fn make_mut(&mut self, node: SyntaxNode) -> SyntaxNode { |
231 | let root = &self | 255 | self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node) |
232 | .mutated_tree | ||
233 | .get_or_insert_with(|| { | ||
234 | let immutable = node.ancestors().last().unwrap(); | ||
235 | let mutable = immutable.clone_for_update(); | ||
236 | (immutable, mutable) | ||
237 | }) | ||
238 | .1; | ||
239 | let ptr = SyntaxNodePtr::new(&&node); | ||
240 | ptr.to_node(root) | ||
241 | } | 256 | } |
242 | 257 | ||
243 | /// Remove specified `range` of text. | 258 | /// Remove specified `range` of text. |
diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs index 5f80a40c8..93b28370c 100644 --- a/crates/ide_assists/src/handlers/extract_function.rs +++ b/crates/ide_assists/src/handlers/extract_function.rs | |||
@@ -16,12 +16,13 @@ use syntax::{ | |||
16 | edit::{AstNodeEdit, IndentLevel}, | 16 | edit::{AstNodeEdit, IndentLevel}, |
17 | AstNode, | 17 | AstNode, |
18 | }, | 18 | }, |
19 | ted, | ||
19 | SyntaxKind::{self, BLOCK_EXPR, BREAK_EXPR, COMMENT, PATH_EXPR, RETURN_EXPR}, | 20 | SyntaxKind::{self, BLOCK_EXPR, BREAK_EXPR, COMMENT, PATH_EXPR, RETURN_EXPR}, |
20 | SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T, | 21 | SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T, |
21 | }; | 22 | }; |
22 | 23 | ||
23 | use crate::{ | 24 | use crate::{ |
24 | assist_context::{AssistContext, Assists}, | 25 | assist_context::{AssistContext, Assists, TreeMutator}, |
25 | AssistId, | 26 | AssistId, |
26 | }; | 27 | }; |
27 | 28 | ||
@@ -1183,7 +1184,7 @@ fn make_ret_ty(ctx: &AssistContext, module: hir::Module, fun: &Function) -> Opti | |||
1183 | } | 1184 | } |
1184 | FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => { | 1185 | FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => { |
1185 | let handler_ty = parent_ret_ty | 1186 | let handler_ty = parent_ret_ty |
1186 | .type_parameters() | 1187 | .type_arguments() |
1187 | .nth(1) | 1188 | .nth(1) |
1188 | .map(|ty| make_ty(&ty, ctx, module)) | 1189 | .map(|ty| make_ty(&ty, ctx, module)) |
1189 | .unwrap_or_else(make::ty_unit); | 1190 | .unwrap_or_else(make::ty_unit); |
@@ -1366,7 +1367,10 @@ fn rewrite_body_segment( | |||
1366 | 1367 | ||
1367 | /// change all usages to account for added `&`/`&mut` for some params | 1368 | /// change all usages to account for added `&`/`&mut` for some params |
1368 | fn fix_param_usages(ctx: &AssistContext, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode { | 1369 | fn fix_param_usages(ctx: &AssistContext, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode { |
1369 | let mut rewriter = SyntaxRewriter::default(); | 1370 | let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new(); |
1371 | |||
1372 | let tm = TreeMutator::new(syntax); | ||
1373 | |||
1370 | for param in params { | 1374 | for param in params { |
1371 | if !param.kind().is_ref() { | 1375 | if !param.kind().is_ref() { |
1372 | continue; | 1376 | continue; |
@@ -1376,30 +1380,39 @@ fn fix_param_usages(ctx: &AssistContext, params: &[Param], syntax: &SyntaxNode) | |||
1376 | let usages = usages | 1380 | let usages = usages |
1377 | .iter() | 1381 | .iter() |
1378 | .filter(|reference| syntax.text_range().contains_range(reference.range)) | 1382 | .filter(|reference| syntax.text_range().contains_range(reference.range)) |
1379 | .filter_map(|reference| path_element_of_reference(syntax, reference)); | 1383 | .filter_map(|reference| path_element_of_reference(syntax, reference)) |
1380 | for path in usages { | 1384 | .map(|expr| tm.make_mut(&expr)); |
1381 | match path.syntax().ancestors().skip(1).find_map(ast::Expr::cast) { | 1385 | |
1386 | usages_for_param.push((param, usages.collect())); | ||
1387 | } | ||
1388 | |||
1389 | let res = tm.make_syntax_mut(syntax); | ||
1390 | |||
1391 | for (param, usages) in usages_for_param { | ||
1392 | for usage in usages { | ||
1393 | match usage.syntax().ancestors().skip(1).find_map(ast::Expr::cast) { | ||
1382 | Some(ast::Expr::MethodCallExpr(_)) | Some(ast::Expr::FieldExpr(_)) => { | 1394 | Some(ast::Expr::MethodCallExpr(_)) | Some(ast::Expr::FieldExpr(_)) => { |
1383 | // do nothing | 1395 | // do nothing |
1384 | } | 1396 | } |
1385 | Some(ast::Expr::RefExpr(node)) | 1397 | Some(ast::Expr::RefExpr(node)) |
1386 | if param.kind() == ParamKind::MutRef && node.mut_token().is_some() => | 1398 | if param.kind() == ParamKind::MutRef && node.mut_token().is_some() => |
1387 | { | 1399 | { |
1388 | rewriter.replace_ast(&node.clone().into(), &node.expr().unwrap()); | 1400 | ted::replace(node.syntax(), node.expr().unwrap().syntax()); |
1389 | } | 1401 | } |
1390 | Some(ast::Expr::RefExpr(node)) | 1402 | Some(ast::Expr::RefExpr(node)) |
1391 | if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() => | 1403 | if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() => |
1392 | { | 1404 | { |
1393 | rewriter.replace_ast(&node.clone().into(), &node.expr().unwrap()); | 1405 | ted::replace(node.syntax(), node.expr().unwrap().syntax()); |
1394 | } | 1406 | } |
1395 | Some(_) | None => { | 1407 | Some(_) | None => { |
1396 | rewriter.replace_ast(&path, &make::expr_prefix(T![*], path.clone())); | 1408 | let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update(); |
1409 | ted::replace(usage.syntax(), p.syntax()) | ||
1397 | } | 1410 | } |
1398 | }; | 1411 | } |
1399 | } | 1412 | } |
1400 | } | 1413 | } |
1401 | 1414 | ||
1402 | rewriter.rewrite(syntax) | 1415 | res |
1403 | } | 1416 | } |
1404 | 1417 | ||
1405 | fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) -> SyntaxNode { | 1418 | fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) -> SyntaxNode { |
diff --git a/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs index c13c6eebe..ce6998d82 100644 --- a/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs +++ b/crates/ide_assists/src/handlers/generate_from_impl_for_enum.rs | |||
@@ -91,7 +91,7 @@ fn existing_from_impl( | |||
91 | 91 | ||
92 | let enum_type = enum_.ty(sema.db); | 92 | let enum_type = enum_.ty(sema.db); |
93 | 93 | ||
94 | let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db); | 94 | let wrapped_type = variant.fields(sema.db).get(0)?.ty(sema.db); |
95 | 95 | ||
96 | if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { | 96 | if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { |
97 | Some(()) | 97 | Some(()) |
diff --git a/crates/ide_assists/src/handlers/reorder_impl.rs b/crates/ide_assists/src/handlers/reorder_impl.rs index 72d889248..54a9a468e 100644 --- a/crates/ide_assists/src/handlers/reorder_impl.rs +++ b/crates/ide_assists/src/handlers/reorder_impl.rs | |||
@@ -79,9 +79,12 @@ pub(crate) fn reorder_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()> | |||
79 | "Sort methods", | 79 | "Sort methods", |
80 | target, | 80 | target, |
81 | |builder| { | 81 | |builder| { |
82 | methods.into_iter().zip(sorted).for_each(|(old, new)| { | 82 | let methods = |
83 | ted::replace(builder.make_ast_mut(old).syntax(), new.clone_for_update().syntax()) | 83 | methods.into_iter().map(|fn_| builder.make_ast_mut(fn_)).collect::<Vec<_>>(); |
84 | }); | 84 | methods |
85 | .into_iter() | ||
86 | .zip(sorted) | ||
87 | .for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax())); | ||
85 | }, | 88 | }, |
86 | ) | 89 | ) |
87 | } | 90 | } |
@@ -160,7 +163,7 @@ $0impl Bar for Foo {} | |||
160 | } | 163 | } |
161 | 164 | ||
162 | #[test] | 165 | #[test] |
163 | fn reorder_impl_trait_methods() { | 166 | fn reorder_impl_trait_functions() { |
164 | check_assist( | 167 | check_assist( |
165 | reorder_impl, | 168 | reorder_impl, |
166 | r#" | 169 | r#" |
@@ -197,4 +200,33 @@ impl Bar for Foo { | |||
197 | "#, | 200 | "#, |
198 | ) | 201 | ) |
199 | } | 202 | } |
203 | |||
204 | #[test] | ||
205 | fn reorder_impl_trait_methods_uneven_ident_lengths() { | ||
206 | check_assist( | ||
207 | reorder_impl, | ||
208 | r#" | ||
209 | trait Bar { | ||
210 | fn foo(&mut self) {} | ||
211 | fn fooo(&mut self) {} | ||
212 | } | ||
213 | |||
214 | struct Foo; | ||
215 | impl Bar for Foo { | ||
216 | fn fooo(&mut self) {} | ||
217 | fn foo(&mut self) {$0} | ||
218 | }"#, | ||
219 | r#" | ||
220 | trait Bar { | ||
221 | fn foo(&mut self) {} | ||
222 | fn fooo(&mut self) {} | ||
223 | } | ||
224 | |||
225 | struct Foo; | ||
226 | impl Bar for Foo { | ||
227 | fn foo(&mut self) {} | ||
228 | fn fooo(&mut self) {} | ||
229 | }"#, | ||
230 | ) | ||
231 | } | ||
200 | } | 232 | } |
diff --git a/crates/ide_assists/src/lib.rs b/crates/ide_assists/src/lib.rs index 88ae5c9a9..2e0c58504 100644 --- a/crates/ide_assists/src/lib.rs +++ b/crates/ide_assists/src/lib.rs | |||
@@ -17,6 +17,8 @@ mod tests; | |||
17 | pub mod utils; | 17 | pub mod utils; |
18 | pub mod ast_transform; | 18 | pub mod ast_transform; |
19 | 19 | ||
20 | use std::str::FromStr; | ||
21 | |||
20 | use hir::Semantics; | 22 | use hir::Semantics; |
21 | use ide_db::base_db::FileRange; | 23 | use ide_db::base_db::FileRange; |
22 | use ide_db::{label::Label, source_change::SourceChange, RootDatabase}; | 24 | use ide_db::{label::Label, source_change::SourceChange, RootDatabase}; |
@@ -56,6 +58,35 @@ impl AssistKind { | |||
56 | _ => return false, | 58 | _ => return false, |
57 | } | 59 | } |
58 | } | 60 | } |
61 | |||
62 | pub fn name(&self) -> &str { | ||
63 | match self { | ||
64 | AssistKind::None => "None", | ||
65 | AssistKind::QuickFix => "QuickFix", | ||
66 | AssistKind::Generate => "Generate", | ||
67 | AssistKind::Refactor => "Refactor", | ||
68 | AssistKind::RefactorExtract => "RefactorExtract", | ||
69 | AssistKind::RefactorInline => "RefactorInline", | ||
70 | AssistKind::RefactorRewrite => "RefactorRewrite", | ||
71 | } | ||
72 | } | ||
73 | } | ||
74 | |||
75 | impl FromStr for AssistKind { | ||
76 | type Err = String; | ||
77 | |||
78 | fn from_str(s: &str) -> Result<Self, Self::Err> { | ||
79 | match s { | ||
80 | "None" => Ok(AssistKind::None), | ||
81 | "QuickFix" => Ok(AssistKind::QuickFix), | ||
82 | "Generate" => Ok(AssistKind::Generate), | ||
83 | "Refactor" => Ok(AssistKind::Refactor), | ||
84 | "RefactorExtract" => Ok(AssistKind::RefactorExtract), | ||
85 | "RefactorInline" => Ok(AssistKind::RefactorInline), | ||
86 | "RefactorRewrite" => Ok(AssistKind::RefactorRewrite), | ||
87 | unknown => Err(format!("Unknown AssistKind: '{}'", unknown)), | ||
88 | } | ||
89 | } | ||
59 | } | 90 | } |
60 | 91 | ||
61 | /// Unique identifier of the assist, should not be shown to the user | 92 | /// Unique identifier of the assist, should not be shown to the user |
@@ -63,6 +94,41 @@ impl AssistKind { | |||
63 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | 94 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] |
64 | pub struct AssistId(pub &'static str, pub AssistKind); | 95 | pub struct AssistId(pub &'static str, pub AssistKind); |
65 | 96 | ||
97 | /// A way to control how many assists to resolve during the assist resolution. | ||
98 | /// When an assist is resolved, its edits are calculated, which might be costly to always do by default. | ||
99 | #[derive(Debug)] | ||
100 | pub enum AssistResolveStrategy { | ||
101 | /// No assists should be resolved. | ||
102 | None, | ||
103 | /// All assists should be resolved. | ||
104 | All, | ||
105 | /// Only a certain assist should be resolved. | ||
106 | Single(SingleResolve), | ||
107 | } | ||
108 | |||
109 | /// Holds the [`AssistId`] data of a certain assist to resolve. | ||
110 | /// The original id object cannot be used due to a `'static` lifetime | ||
111 | /// and the requirement to construct this struct dynamically during the resolve handling. | ||
112 | #[derive(Debug)] | ||
113 | pub struct SingleResolve { | ||
114 | /// The id of the assist. | ||
115 | pub assist_id: String, | ||
116 | /// The kind of the assist. | ||
117 | pub assist_kind: AssistKind, | ||
118 | } | ||
119 | |||
120 | impl AssistResolveStrategy { | ||
121 | pub fn should_resolve(&self, id: &AssistId) -> bool { | ||
122 | match self { | ||
123 | AssistResolveStrategy::None => false, | ||
124 | AssistResolveStrategy::All => true, | ||
125 | AssistResolveStrategy::Single(single_resolve) => { | ||
126 | single_resolve.assist_id == id.0 && single_resolve.assist_kind == id.1 | ||
127 | } | ||
128 | } | ||
129 | } | ||
130 | } | ||
131 | |||
66 | #[derive(Clone, Debug)] | 132 | #[derive(Clone, Debug)] |
67 | pub struct GroupLabel(pub String); | 133 | pub struct GroupLabel(pub String); |
68 | 134 | ||
@@ -91,7 +157,7 @@ impl Assist { | |||
91 | pub fn get( | 157 | pub fn get( |
92 | db: &RootDatabase, | 158 | db: &RootDatabase, |
93 | config: &AssistConfig, | 159 | config: &AssistConfig, |
94 | resolve: bool, | 160 | resolve: AssistResolveStrategy, |
95 | range: FileRange, | 161 | range: FileRange, |
96 | ) -> Vec<Assist> { | 162 | ) -> Vec<Assist> { |
97 | let sema = Semantics::new(db); | 163 | let sema = Semantics::new(db); |
diff --git a/crates/ide_assists/src/tests.rs b/crates/ide_assists/src/tests.rs index 6f4f97361..9c2847998 100644 --- a/crates/ide_assists/src/tests.rs +++ b/crates/ide_assists/src/tests.rs | |||
@@ -12,7 +12,10 @@ use stdx::{format_to, trim_indent}; | |||
12 | use syntax::TextRange; | 12 | use syntax::TextRange; |
13 | use test_utils::{assert_eq_text, extract_offset}; | 13 | use test_utils::{assert_eq_text, extract_offset}; |
14 | 14 | ||
15 | use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, Assists}; | 15 | use crate::{ |
16 | handlers::Handler, Assist, AssistConfig, AssistContext, AssistKind, AssistResolveStrategy, | ||
17 | Assists, SingleResolve, | ||
18 | }; | ||
16 | 19 | ||
17 | pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { | 20 | pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { |
18 | snippet_cap: SnippetCap::new(true), | 21 | snippet_cap: SnippetCap::new(true), |
@@ -65,14 +68,14 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) { | |||
65 | let before = db.file_text(file_id).to_string(); | 68 | let before = db.file_text(file_id).to_string(); |
66 | let frange = FileRange { file_id, range: selection.into() }; | 69 | let frange = FileRange { file_id, range: selection.into() }; |
67 | 70 | ||
68 | let assist = Assist::get(&db, &TEST_CONFIG, true, frange) | 71 | let assist = Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange) |
69 | .into_iter() | 72 | .into_iter() |
70 | .find(|assist| assist.id.0 == assist_id) | 73 | .find(|assist| assist.id.0 == assist_id) |
71 | .unwrap_or_else(|| { | 74 | .unwrap_or_else(|| { |
72 | panic!( | 75 | panic!( |
73 | "\n\nAssist is not applicable: {}\nAvailable assists: {}", | 76 | "\n\nAssist is not applicable: {}\nAvailable assists: {}", |
74 | assist_id, | 77 | assist_id, |
75 | Assist::get(&db, &TEST_CONFIG, false, frange) | 78 | Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange) |
76 | .into_iter() | 79 | .into_iter() |
77 | .map(|assist| assist.id.0) | 80 | .map(|assist| assist.id.0) |
78 | .collect::<Vec<_>>() | 81 | .collect::<Vec<_>>() |
@@ -108,7 +111,7 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult, assist_label: | |||
108 | let sema = Semantics::new(&db); | 111 | let sema = Semantics::new(&db); |
109 | let config = TEST_CONFIG; | 112 | let config = TEST_CONFIG; |
110 | let ctx = AssistContext::new(sema, &config, frange); | 113 | let ctx = AssistContext::new(sema, &config, frange); |
111 | let mut acc = Assists::new(&ctx, true); | 114 | let mut acc = Assists::new(&ctx, AssistResolveStrategy::All); |
112 | handler(&mut acc, &ctx); | 115 | handler(&mut acc, &ctx); |
113 | let mut res = acc.finish(); | 116 | let mut res = acc.finish(); |
114 | 117 | ||
@@ -186,7 +189,7 @@ fn assist_order_field_struct() { | |||
186 | let (before_cursor_pos, before) = extract_offset(before); | 189 | let (before_cursor_pos, before) = extract_offset(before); |
187 | let (db, file_id) = with_single_file(&before); | 190 | let (db, file_id) = with_single_file(&before); |
188 | let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; | 191 | let frange = FileRange { file_id, range: TextRange::empty(before_cursor_pos) }; |
189 | let assists = Assist::get(&db, &TEST_CONFIG, false, frange); | 192 | let assists = Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange); |
190 | let mut assists = assists.iter(); | 193 | let mut assists = assists.iter(); |
191 | 194 | ||
192 | assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)"); | 195 | assert_eq!(assists.next().expect("expected assist").label, "Change visibility to pub(crate)"); |
@@ -211,7 +214,7 @@ pub fn test_some_range(a: int) -> bool { | |||
211 | "#, | 214 | "#, |
212 | ); | 215 | ); |
213 | 216 | ||
214 | let assists = Assist::get(&db, &TEST_CONFIG, false, frange); | 217 | let assists = Assist::get(&db, &TEST_CONFIG, AssistResolveStrategy::None, frange); |
215 | let expected = labels(&assists); | 218 | let expected = labels(&assists); |
216 | 219 | ||
217 | expect![[r#" | 220 | expect![[r#" |
@@ -240,7 +243,7 @@ pub fn test_some_range(a: int) -> bool { | |||
240 | let mut cfg = TEST_CONFIG; | 243 | let mut cfg = TEST_CONFIG; |
241 | cfg.allowed = Some(vec![AssistKind::Refactor]); | 244 | cfg.allowed = Some(vec![AssistKind::Refactor]); |
242 | 245 | ||
243 | let assists = Assist::get(&db, &cfg, false, frange); | 246 | let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange); |
244 | let expected = labels(&assists); | 247 | let expected = labels(&assists); |
245 | 248 | ||
246 | expect![[r#" | 249 | expect![[r#" |
@@ -255,7 +258,7 @@ pub fn test_some_range(a: int) -> bool { | |||
255 | { | 258 | { |
256 | let mut cfg = TEST_CONFIG; | 259 | let mut cfg = TEST_CONFIG; |
257 | cfg.allowed = Some(vec![AssistKind::RefactorExtract]); | 260 | cfg.allowed = Some(vec![AssistKind::RefactorExtract]); |
258 | let assists = Assist::get(&db, &cfg, false, frange); | 261 | let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange); |
259 | let expected = labels(&assists); | 262 | let expected = labels(&assists); |
260 | 263 | ||
261 | expect![[r#" | 264 | expect![[r#" |
@@ -268,9 +271,250 @@ pub fn test_some_range(a: int) -> bool { | |||
268 | { | 271 | { |
269 | let mut cfg = TEST_CONFIG; | 272 | let mut cfg = TEST_CONFIG; |
270 | cfg.allowed = Some(vec![AssistKind::QuickFix]); | 273 | cfg.allowed = Some(vec![AssistKind::QuickFix]); |
271 | let assists = Assist::get(&db, &cfg, false, frange); | 274 | let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange); |
272 | let expected = labels(&assists); | 275 | let expected = labels(&assists); |
273 | 276 | ||
274 | expect![[r#""#]].assert_eq(&expected); | 277 | expect![[r#""#]].assert_eq(&expected); |
275 | } | 278 | } |
276 | } | 279 | } |
280 | |||
281 | #[test] | ||
282 | fn various_resolve_strategies() { | ||
283 | let (db, frange) = RootDatabase::with_range( | ||
284 | r#" | ||
285 | pub fn test_some_range(a: int) -> bool { | ||
286 | if let 2..6 = $05$0 { | ||
287 | true | ||
288 | } else { | ||
289 | false | ||
290 | } | ||
291 | } | ||
292 | "#, | ||
293 | ); | ||
294 | |||
295 | let mut cfg = TEST_CONFIG; | ||
296 | cfg.allowed = Some(vec![AssistKind::RefactorExtract]); | ||
297 | |||
298 | { | ||
299 | let assists = Assist::get(&db, &cfg, AssistResolveStrategy::None, frange); | ||
300 | assert_eq!(2, assists.len()); | ||
301 | let mut assists = assists.into_iter(); | ||
302 | |||
303 | let extract_into_variable_assist = assists.next().unwrap(); | ||
304 | expect![[r#" | ||
305 | Assist { | ||
306 | id: AssistId( | ||
307 | "extract_variable", | ||
308 | RefactorExtract, | ||
309 | ), | ||
310 | label: "Extract into variable", | ||
311 | group: None, | ||
312 | target: 59..60, | ||
313 | source_change: None, | ||
314 | } | ||
315 | "#]] | ||
316 | .assert_debug_eq(&extract_into_variable_assist); | ||
317 | |||
318 | let extract_into_function_assist = assists.next().unwrap(); | ||
319 | expect![[r#" | ||
320 | Assist { | ||
321 | id: AssistId( | ||
322 | "extract_function", | ||
323 | RefactorExtract, | ||
324 | ), | ||
325 | label: "Extract into function", | ||
326 | group: None, | ||
327 | target: 59..60, | ||
328 | source_change: None, | ||
329 | } | ||
330 | "#]] | ||
331 | .assert_debug_eq(&extract_into_function_assist); | ||
332 | } | ||
333 | |||
334 | { | ||
335 | let assists = Assist::get( | ||
336 | &db, | ||
337 | &cfg, | ||
338 | AssistResolveStrategy::Single(SingleResolve { | ||
339 | assist_id: "SOMETHING_MISMATCHING".to_string(), | ||
340 | assist_kind: AssistKind::RefactorExtract, | ||
341 | }), | ||
342 | frange, | ||
343 | ); | ||
344 | assert_eq!(2, assists.len()); | ||
345 | let mut assists = assists.into_iter(); | ||
346 | |||
347 | let extract_into_variable_assist = assists.next().unwrap(); | ||
348 | expect![[r#" | ||
349 | Assist { | ||
350 | id: AssistId( | ||
351 | "extract_variable", | ||
352 | RefactorExtract, | ||
353 | ), | ||
354 | label: "Extract into variable", | ||
355 | group: None, | ||
356 | target: 59..60, | ||
357 | source_change: None, | ||
358 | } | ||
359 | "#]] | ||
360 | .assert_debug_eq(&extract_into_variable_assist); | ||
361 | |||
362 | let extract_into_function_assist = assists.next().unwrap(); | ||
363 | expect![[r#" | ||
364 | Assist { | ||
365 | id: AssistId( | ||
366 | "extract_function", | ||
367 | RefactorExtract, | ||
368 | ), | ||
369 | label: "Extract into function", | ||
370 | group: None, | ||
371 | target: 59..60, | ||
372 | source_change: None, | ||
373 | } | ||
374 | "#]] | ||
375 | .assert_debug_eq(&extract_into_function_assist); | ||
376 | } | ||
377 | |||
378 | { | ||
379 | let assists = Assist::get( | ||
380 | &db, | ||
381 | &cfg, | ||
382 | AssistResolveStrategy::Single(SingleResolve { | ||
383 | assist_id: "extract_variable".to_string(), | ||
384 | assist_kind: AssistKind::RefactorExtract, | ||
385 | }), | ||
386 | frange, | ||
387 | ); | ||
388 | assert_eq!(2, assists.len()); | ||
389 | let mut assists = assists.into_iter(); | ||
390 | |||
391 | let extract_into_variable_assist = assists.next().unwrap(); | ||
392 | expect![[r#" | ||
393 | Assist { | ||
394 | id: AssistId( | ||
395 | "extract_variable", | ||
396 | RefactorExtract, | ||
397 | ), | ||
398 | label: "Extract into variable", | ||
399 | group: None, | ||
400 | target: 59..60, | ||
401 | source_change: Some( | ||
402 | SourceChange { | ||
403 | source_file_edits: { | ||
404 | FileId( | ||
405 | 0, | ||
406 | ): TextEdit { | ||
407 | indels: [ | ||
408 | Indel { | ||
409 | insert: "let $0var_name = 5;\n ", | ||
410 | delete: 45..45, | ||
411 | }, | ||
412 | Indel { | ||
413 | insert: "var_name", | ||
414 | delete: 59..60, | ||
415 | }, | ||
416 | ], | ||
417 | }, | ||
418 | }, | ||
419 | file_system_edits: [], | ||
420 | is_snippet: true, | ||
421 | }, | ||
422 | ), | ||
423 | } | ||
424 | "#]] | ||
425 | .assert_debug_eq(&extract_into_variable_assist); | ||
426 | |||
427 | let extract_into_function_assist = assists.next().unwrap(); | ||
428 | expect![[r#" | ||
429 | Assist { | ||
430 | id: AssistId( | ||
431 | "extract_function", | ||
432 | RefactorExtract, | ||
433 | ), | ||
434 | label: "Extract into function", | ||
435 | group: None, | ||
436 | target: 59..60, | ||
437 | source_change: None, | ||
438 | } | ||
439 | "#]] | ||
440 | .assert_debug_eq(&extract_into_function_assist); | ||
441 | } | ||
442 | |||
443 | { | ||
444 | let assists = Assist::get(&db, &cfg, AssistResolveStrategy::All, frange); | ||
445 | assert_eq!(2, assists.len()); | ||
446 | let mut assists = assists.into_iter(); | ||
447 | |||
448 | let extract_into_variable_assist = assists.next().unwrap(); | ||
449 | expect![[r#" | ||
450 | Assist { | ||
451 | id: AssistId( | ||
452 | "extract_variable", | ||
453 | RefactorExtract, | ||
454 | ), | ||
455 | label: "Extract into variable", | ||
456 | group: None, | ||
457 | target: 59..60, | ||
458 | source_change: Some( | ||
459 | SourceChange { | ||
460 | source_file_edits: { | ||
461 | FileId( | ||
462 | 0, | ||
463 | ): TextEdit { | ||
464 | indels: [ | ||
465 | Indel { | ||
466 | insert: "let $0var_name = 5;\n ", | ||
467 | delete: 45..45, | ||
468 | }, | ||
469 | Indel { | ||
470 | insert: "var_name", | ||
471 | delete: 59..60, | ||
472 | }, | ||
473 | ], | ||
474 | }, | ||
475 | }, | ||
476 | file_system_edits: [], | ||
477 | is_snippet: true, | ||
478 | }, | ||
479 | ), | ||
480 | } | ||
481 | "#]] | ||
482 | .assert_debug_eq(&extract_into_variable_assist); | ||
483 | |||
484 | let extract_into_function_assist = assists.next().unwrap(); | ||
485 | expect![[r#" | ||
486 | Assist { | ||
487 | id: AssistId( | ||
488 | "extract_function", | ||
489 | RefactorExtract, | ||
490 | ), | ||
491 | label: "Extract into function", | ||
492 | group: None, | ||
493 | target: 59..60, | ||
494 | source_change: Some( | ||
495 | SourceChange { | ||
496 | source_file_edits: { | ||
497 | FileId( | ||
498 | 0, | ||
499 | ): TextEdit { | ||
500 | indels: [ | ||
501 | Indel { | ||
502 | insert: "fun_name()", | ||
503 | delete: 59..60, | ||
504 | }, | ||
505 | Indel { | ||
506 | insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}", | ||
507 | delete: 110..110, | ||
508 | }, | ||
509 | ], | ||
510 | }, | ||
511 | }, | ||
512 | file_system_edits: [], | ||
513 | is_snippet: true, | ||
514 | }, | ||
515 | ), | ||
516 | } | ||
517 | "#]] | ||
518 | .assert_debug_eq(&extract_into_function_assist); | ||
519 | } | ||
520 | } | ||
diff --git a/crates/ide_assists/src/utils/suggest_name.rs b/crates/ide_assists/src/utils/suggest_name.rs index 533624c1f..deafcd630 100644 --- a/crates/ide_assists/src/utils/suggest_name.rs +++ b/crates/ide_assists/src/utils/suggest_name.rs | |||
@@ -227,7 +227,7 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option<String> { | |||
227 | let name = adt.name(db).to_string(); | 227 | let name = adt.name(db).to_string(); |
228 | 228 | ||
229 | if WRAPPER_TYPES.contains(&name.as_str()) { | 229 | if WRAPPER_TYPES.contains(&name.as_str()) { |
230 | let inner_ty = ty.type_parameters().next()?; | 230 | let inner_ty = ty.type_arguments().next()?; |
231 | return name_of_type(&inner_ty, db); | 231 | return name_of_type(&inner_ty, db); |
232 | } | 232 | } |
233 | 233 | ||
diff --git a/crates/ide_completion/src/completions.rs b/crates/ide_completion/src/completions.rs index e2994eed4..78154bf3e 100644 --- a/crates/ide_completion/src/completions.rs +++ b/crates/ide_completion/src/completions.rs | |||
@@ -203,41 +203,37 @@ impl Completions { | |||
203 | fn complete_enum_variants( | 203 | fn complete_enum_variants( |
204 | acc: &mut Completions, | 204 | acc: &mut Completions, |
205 | ctx: &CompletionContext, | 205 | ctx: &CompletionContext, |
206 | ty: &hir::Type, | 206 | enum_data: hir::Enum, |
207 | cb: impl Fn(&mut Completions, &CompletionContext, hir::Variant, hir::ModPath), | 207 | cb: impl Fn(&mut Completions, &CompletionContext, hir::Variant, hir::ModPath), |
208 | ) { | 208 | ) { |
209 | if let Some(hir::Adt::Enum(enum_data)) = | 209 | let variants = enum_data.variants(ctx.db); |
210 | iter::successors(Some(ty.clone()), |ty| ty.remove_ref()).last().and_then(|ty| ty.as_adt()) | 210 | |
211 | { | 211 | let module = if let Some(module) = ctx.scope.module() { |
212 | let variants = enum_data.variants(ctx.db); | 212 | // Compute path from the completion site if available. |
213 | 213 | module | |
214 | let module = if let Some(module) = ctx.scope.module() { | 214 | } else { |
215 | // Compute path from the completion site if available. | 215 | // Otherwise fall back to the enum's definition site. |
216 | module | 216 | enum_data.module(ctx.db) |
217 | } else { | 217 | }; |
218 | // Otherwise fall back to the enum's definition site. | 218 | |
219 | enum_data.module(ctx.db) | 219 | if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { |
220 | }; | 220 | if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_data)) { |
221 | 221 | for &variant in &variants { | |
222 | if let Some(impl_) = ctx.impl_def.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { | 222 | let self_path = hir::ModPath::from_segments( |
223 | if impl_.self_ty(ctx.db) == *ty { | 223 | hir::PathKind::Plain, |
224 | for &variant in &variants { | 224 | iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))), |
225 | let self_path = hir::ModPath::from_segments( | 225 | ); |
226 | hir::PathKind::Plain, | 226 | cb(acc, ctx, variant, self_path); |
227 | iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))), | ||
228 | ); | ||
229 | cb(acc, ctx, variant, self_path); | ||
230 | } | ||
231 | } | 227 | } |
232 | } | 228 | } |
229 | } | ||
233 | 230 | ||
234 | for variant in variants { | 231 | for variant in variants { |
235 | if let Some(path) = module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) { | 232 | if let Some(path) = module.find_use_path(ctx.db, hir::ModuleDef::from(variant)) { |
236 | // Variants with trivial paths are already added by the existing completion logic, | 233 | // Variants with trivial paths are already added by the existing completion logic, |
237 | // so we should avoid adding these twice | 234 | // so we should avoid adding these twice |
238 | if path.segments().len() > 1 { | 235 | if path.segments().len() > 1 { |
239 | cb(acc, ctx, variant, path); | 236 | cb(acc, ctx, variant, path); |
240 | } | ||
241 | } | 237 | } |
242 | } | 238 | } |
243 | } | 239 | } |
diff --git a/crates/ide_completion/src/completions/pattern.rs b/crates/ide_completion/src/completions/pattern.rs index 808d7ff7e..8dc9ab73c 100644 --- a/crates/ide_completion/src/completions/pattern.rs +++ b/crates/ide_completion/src/completions/pattern.rs | |||
@@ -12,8 +12,10 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | |||
12 | } | 12 | } |
13 | 13 | ||
14 | if !ctx.is_irrefutable_pat_binding { | 14 | if !ctx.is_irrefutable_pat_binding { |
15 | if let Some(ty) = ctx.expected_type.as_ref() { | 15 | if let Some(hir::Adt::Enum(e)) = |
16 | super::complete_enum_variants(acc, ctx, ty, |acc, ctx, variant, path| { | 16 | ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) |
17 | { | ||
18 | super::complete_enum_variants(acc, ctx, e, |acc, ctx, variant, path| { | ||
17 | acc.add_qualified_variant_pat(ctx, variant, path.clone()); | 19 | acc.add_qualified_variant_pat(ctx, variant, path.clone()); |
18 | acc.add_qualified_enum_variant(ctx, variant, path); | 20 | acc.add_qualified_enum_variant(ctx, variant, path); |
19 | }); | 21 | }); |
diff --git a/crates/ide_completion/src/completions/postfix.rs b/crates/ide_completion/src/completions/postfix.rs index ac69b720a..962aaf0df 100644 --- a/crates/ide_completion/src/completions/postfix.rs +++ b/crates/ide_completion/src/completions/postfix.rs | |||
@@ -35,14 +35,11 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
35 | None => return, | 35 | None => return, |
36 | }; | 36 | }; |
37 | 37 | ||
38 | let ref_removed_ty = | ||
39 | std::iter::successors(Some(receiver_ty.clone()), |ty| ty.remove_ref()).last().unwrap(); | ||
40 | |||
41 | let cap = match ctx.config.snippet_cap { | 38 | let cap = match ctx.config.snippet_cap { |
42 | Some(it) => it, | 39 | Some(it) => it, |
43 | None => return, | 40 | None => return, |
44 | }; | 41 | }; |
45 | let try_enum = TryEnum::from_ty(&ctx.sema, &ref_removed_ty); | 42 | let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references()); |
46 | if let Some(try_enum) = &try_enum { | 43 | if let Some(try_enum) = &try_enum { |
47 | match try_enum { | 44 | match try_enum { |
48 | TryEnum::Result => { | 45 | TryEnum::Result => { |
diff --git a/crates/ide_completion/src/completions/postfix/format_like.rs b/crates/ide_completion/src/completions/postfix/format_like.rs index e86ffa8f8..0dcb3e898 100644 --- a/crates/ide_completion/src/completions/postfix/format_like.rs +++ b/crates/ide_completion/src/completions/postfix/format_like.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | // Feature: Format String Completion. | 1 | // Feature: Format String Completion |
2 | // | 2 | // |
3 | // `"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`. | 3 | // `"Result {result} is {2 + 2}"` is expanded to the `"Result {} is {}", result, 2 + 2`. |
4 | // | 4 | // |
diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs index 969249df6..eedb44873 100644 --- a/crates/ide_completion/src/completions/qualified_path.rs +++ b/crates/ide_completion/src/completions/qualified_path.rs | |||
@@ -52,18 +52,24 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
52 | | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) | 52 | | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) |
53 | | PathResolution::Def(def @ hir::ModuleDef::BuiltinType(_)) => { | 53 | | PathResolution::Def(def @ hir::ModuleDef::BuiltinType(_)) => { |
54 | if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { | 54 | if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { |
55 | for variant in e.variants(ctx.db) { | 55 | add_enum_variants(ctx, acc, e); |
56 | acc.add_enum_variant(ctx, variant, None); | ||
57 | } | ||
58 | } | 56 | } |
59 | let ty = match def { | 57 | let ty = match def { |
60 | hir::ModuleDef::Adt(adt) => adt.ty(ctx.db), | 58 | hir::ModuleDef::Adt(adt) => adt.ty(ctx.db), |
61 | hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), | 59 | hir::ModuleDef::TypeAlias(a) => { |
60 | let ty = a.ty(ctx.db); | ||
61 | if let Some(Adt::Enum(e)) = ty.as_adt() { | ||
62 | cov_mark::hit!(completes_variant_through_alias); | ||
63 | add_enum_variants(ctx, acc, e); | ||
64 | } | ||
65 | ty | ||
66 | } | ||
62 | hir::ModuleDef::BuiltinType(builtin) => { | 67 | hir::ModuleDef::BuiltinType(builtin) => { |
63 | let module = match ctx.scope.module() { | 68 | let module = match ctx.scope.module() { |
64 | Some(it) => it, | 69 | Some(it) => it, |
65 | None => return, | 70 | None => return, |
66 | }; | 71 | }; |
72 | cov_mark::hit!(completes_primitive_assoc_const); | ||
67 | builtin.ty(ctx.db, module) | 73 | builtin.ty(ctx.db, module) |
68 | } | 74 | } |
69 | _ => unreachable!(), | 75 | _ => unreachable!(), |
@@ -92,9 +98,8 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
92 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { | 98 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { |
93 | return None; | 99 | return None; |
94 | } | 100 | } |
95 | match item { | 101 | if let hir::AssocItem::TypeAlias(ty) = item { |
96 | hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {} | 102 | acc.add_type_alias(ctx, ty) |
97 | hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), | ||
98 | } | 103 | } |
99 | None::<()> | 104 | None::<()> |
100 | }); | 105 | }); |
@@ -122,9 +127,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
122 | }; | 127 | }; |
123 | 128 | ||
124 | if let Some(Adt::Enum(e)) = ty.as_adt() { | 129 | if let Some(Adt::Enum(e)) = ty.as_adt() { |
125 | for variant in e.variants(ctx.db) { | 130 | add_enum_variants(ctx, acc, e); |
126 | acc.add_enum_variant(ctx, variant, None); | ||
127 | } | ||
128 | } | 131 | } |
129 | 132 | ||
130 | let traits_in_scope = ctx.scope.traits_in_scope(); | 133 | let traits_in_scope = ctx.scope.traits_in_scope(); |
@@ -151,6 +154,12 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
151 | } | 154 | } |
152 | } | 155 | } |
153 | 156 | ||
157 | fn add_enum_variants(ctx: &CompletionContext, acc: &mut Completions, e: hir::Enum) { | ||
158 | for variant in e.variants(ctx.db) { | ||
159 | acc.add_enum_variant(ctx, variant, None); | ||
160 | } | ||
161 | } | ||
162 | |||
154 | #[cfg(test)] | 163 | #[cfg(test)] |
155 | mod tests { | 164 | mod tests { |
156 | use expect_test::{expect, Expect}; | 165 | use expect_test::{expect, Expect}; |
@@ -737,29 +746,7 @@ fn f() {} | |||
737 | } | 746 | } |
738 | 747 | ||
739 | #[test] | 748 | #[test] |
740 | fn completes_function() { | 749 | fn completes_variant_through_self() { |
741 | check( | ||
742 | r#" | ||
743 | fn foo( | ||
744 | a: i32, | ||
745 | b: i32 | ||
746 | ) { | ||
747 | |||
748 | } | ||
749 | |||
750 | fn main() { | ||
751 | fo$0 | ||
752 | } | ||
753 | "#, | ||
754 | expect![[r#" | ||
755 | fn main() fn() | ||
756 | fn foo(…) fn(i32, i32) | ||
757 | "#]], | ||
758 | ); | ||
759 | } | ||
760 | |||
761 | #[test] | ||
762 | fn completes_self_enum() { | ||
763 | check( | 750 | check( |
764 | r#" | 751 | r#" |
765 | enum Foo { | 752 | enum Foo { |
@@ -783,6 +770,7 @@ impl Foo { | |||
783 | 770 | ||
784 | #[test] | 771 | #[test] |
785 | fn completes_primitive_assoc_const() { | 772 | fn completes_primitive_assoc_const() { |
773 | cov_mark::check!(completes_primitive_assoc_const); | ||
786 | check( | 774 | check( |
787 | r#" | 775 | r#" |
788 | //- /lib.rs crate:lib deps:core | 776 | //- /lib.rs crate:lib deps:core |
@@ -804,4 +792,23 @@ impl u8 { | |||
804 | "#]], | 792 | "#]], |
805 | ); | 793 | ); |
806 | } | 794 | } |
795 | |||
796 | #[test] | ||
797 | fn completes_variant_through_alias() { | ||
798 | cov_mark::check!(completes_variant_through_alias); | ||
799 | check( | ||
800 | r#" | ||
801 | enum Foo { | ||
802 | Bar | ||
803 | } | ||
804 | type Foo2 = Foo; | ||
805 | fn main() { | ||
806 | Foo2::$0 | ||
807 | } | ||
808 | "#, | ||
809 | expect![[r#" | ||
810 | ev Bar () | ||
811 | "#]], | ||
812 | ); | ||
813 | } | ||
807 | } | 814 | } |
diff --git a/crates/ide_completion/src/completions/unqualified_path.rs b/crates/ide_completion/src/completions/unqualified_path.rs index 1b8b063e7..7875500c1 100644 --- a/crates/ide_completion/src/completions/unqualified_path.rs +++ b/crates/ide_completion/src/completions/unqualified_path.rs | |||
@@ -17,8 +17,10 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC | |||
17 | return; | 17 | return; |
18 | } | 18 | } |
19 | 19 | ||
20 | if let Some(ty) = &ctx.expected_type { | 20 | if let Some(hir::Adt::Enum(e)) = |
21 | super::complete_enum_variants(acc, ctx, ty, |acc, ctx, variant, path| { | 21 | ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) |
22 | { | ||
23 | super::complete_enum_variants(acc, ctx, e, |acc, ctx, variant, path| { | ||
22 | acc.add_qualified_enum_variant(ctx, variant, path) | 24 | acc.add_qualified_enum_variant(ctx, variant, path) |
23 | }); | 25 | }); |
24 | } | 26 | } |
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs index 32f81aec1..f3fcb712c 100644 --- a/crates/ide_completion/src/context.rs +++ b/crates/ide_completion/src/context.rs | |||
@@ -301,103 +301,108 @@ impl<'a> CompletionContext<'a> { | |||
301 | .find_map(ast::Impl::cast); | 301 | .find_map(ast::Impl::cast); |
302 | } | 302 | } |
303 | 303 | ||
304 | fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) { | ||
305 | let mut node = match self.token.parent() { | ||
306 | Some(it) => it, | ||
307 | None => return (None, None), | ||
308 | }; | ||
309 | loop { | ||
310 | break match_ast! { | ||
311 | match node { | ||
312 | ast::LetStmt(it) => { | ||
313 | cov_mark::hit!(expected_type_let_with_leading_char); | ||
314 | cov_mark::hit!(expected_type_let_without_leading_char); | ||
315 | let ty = it.pat() | ||
316 | .and_then(|pat| self.sema.type_of_pat(&pat)); | ||
317 | let name = if let Some(ast::Pat::IdentPat(ident)) = it.pat() { | ||
318 | ident.name().map(NameOrNameRef::Name) | ||
319 | } else { | ||
320 | None | ||
321 | }; | ||
322 | |||
323 | (ty, name) | ||
324 | }, | ||
325 | ast::ArgList(_it) => { | ||
326 | cov_mark::hit!(expected_type_fn_param_with_leading_char); | ||
327 | cov_mark::hit!(expected_type_fn_param_without_leading_char); | ||
328 | ActiveParameter::at_token( | ||
329 | &self.sema, | ||
330 | self.token.clone(), | ||
331 | ).map(|ap| { | ||
332 | let name = ap.ident().map(NameOrNameRef::Name); | ||
333 | (Some(ap.ty), name) | ||
334 | }) | ||
335 | .unwrap_or((None, None)) | ||
336 | }, | ||
337 | ast::RecordExprFieldList(_it) => { | ||
338 | cov_mark::hit!(expected_type_struct_field_without_leading_char); | ||
339 | self.token.prev_sibling_or_token() | ||
340 | .and_then(|se| se.into_node()) | ||
341 | .and_then(|node| ast::RecordExprField::cast(node)) | ||
342 | .and_then(|rf| self.sema.resolve_record_field(&rf).zip(Some(rf))) | ||
343 | .map(|(f, rf)|( | ||
344 | Some(f.0.ty(self.db)), | ||
345 | rf.field_name().map(NameOrNameRef::NameRef), | ||
346 | )) | ||
347 | .unwrap_or((None, None)) | ||
348 | }, | ||
349 | ast::RecordExprField(it) => { | ||
350 | cov_mark::hit!(expected_type_struct_field_with_leading_char); | ||
351 | self.sema | ||
352 | .resolve_record_field(&it) | ||
353 | .map(|f|( | ||
354 | Some(f.0.ty(self.db)), | ||
355 | it.field_name().map(NameOrNameRef::NameRef), | ||
356 | )) | ||
357 | .unwrap_or((None, None)) | ||
358 | }, | ||
359 | ast::MatchExpr(it) => { | ||
360 | cov_mark::hit!(expected_type_match_arm_without_leading_char); | ||
361 | let ty = it.expr() | ||
362 | .and_then(|e| self.sema.type_of_expr(&e)); | ||
363 | (ty, None) | ||
364 | }, | ||
365 | ast::IfExpr(it) => { | ||
366 | cov_mark::hit!(expected_type_if_let_without_leading_char); | ||
367 | let ty = it.condition() | ||
368 | .and_then(|cond| cond.expr()) | ||
369 | .and_then(|e| self.sema.type_of_expr(&e)); | ||
370 | (ty, None) | ||
371 | }, | ||
372 | ast::IdentPat(it) => { | ||
373 | cov_mark::hit!(expected_type_if_let_with_leading_char); | ||
374 | cov_mark::hit!(expected_type_match_arm_with_leading_char); | ||
375 | let ty = self.sema.type_of_pat(&ast::Pat::from(it)); | ||
376 | (ty, None) | ||
377 | }, | ||
378 | ast::Fn(it) => { | ||
379 | cov_mark::hit!(expected_type_fn_ret_with_leading_char); | ||
380 | cov_mark::hit!(expected_type_fn_ret_without_leading_char); | ||
381 | let def = self.sema.to_def(&it); | ||
382 | (def.map(|def| def.ret_type(self.db)), None) | ||
383 | }, | ||
384 | ast::Stmt(it) => (None, None), | ||
385 | _ => { | ||
386 | match node.parent() { | ||
387 | Some(n) => { | ||
388 | node = n; | ||
389 | continue; | ||
390 | }, | ||
391 | None => (None, None), | ||
392 | } | ||
393 | }, | ||
394 | } | ||
395 | }; | ||
396 | } | ||
397 | } | ||
398 | |||
304 | fn fill( | 399 | fn fill( |
305 | &mut self, | 400 | &mut self, |
306 | original_file: &SyntaxNode, | 401 | original_file: &SyntaxNode, |
307 | file_with_fake_ident: SyntaxNode, | 402 | file_with_fake_ident: SyntaxNode, |
308 | offset: TextSize, | 403 | offset: TextSize, |
309 | ) { | 404 | ) { |
310 | let (expected_type, expected_name) = { | 405 | let (expected_type, expected_name) = self.expected_type_and_name(); |
311 | let mut node = match self.token.parent() { | ||
312 | Some(it) => it, | ||
313 | None => return, | ||
314 | }; | ||
315 | loop { | ||
316 | break match_ast! { | ||
317 | match node { | ||
318 | ast::LetStmt(it) => { | ||
319 | cov_mark::hit!(expected_type_let_with_leading_char); | ||
320 | cov_mark::hit!(expected_type_let_without_leading_char); | ||
321 | let ty = it.pat() | ||
322 | .and_then(|pat| self.sema.type_of_pat(&pat)); | ||
323 | let name = if let Some(ast::Pat::IdentPat(ident)) = it.pat() { | ||
324 | ident.name().map(NameOrNameRef::Name) | ||
325 | } else { | ||
326 | None | ||
327 | }; | ||
328 | |||
329 | (ty, name) | ||
330 | }, | ||
331 | ast::ArgList(_it) => { | ||
332 | cov_mark::hit!(expected_type_fn_param_with_leading_char); | ||
333 | cov_mark::hit!(expected_type_fn_param_without_leading_char); | ||
334 | ActiveParameter::at_token( | ||
335 | &self.sema, | ||
336 | self.token.clone(), | ||
337 | ).map(|ap| { | ||
338 | let name = ap.ident().map(NameOrNameRef::Name); | ||
339 | (Some(ap.ty), name) | ||
340 | }) | ||
341 | .unwrap_or((None, None)) | ||
342 | }, | ||
343 | ast::RecordExprFieldList(_it) => { | ||
344 | cov_mark::hit!(expected_type_struct_field_without_leading_char); | ||
345 | self.token.prev_sibling_or_token() | ||
346 | .and_then(|se| se.into_node()) | ||
347 | .and_then(|node| ast::RecordExprField::cast(node)) | ||
348 | .and_then(|rf| self.sema.resolve_record_field(&rf).zip(Some(rf))) | ||
349 | .map(|(f, rf)|( | ||
350 | Some(f.0.signature_ty(self.db)), | ||
351 | rf.field_name().map(NameOrNameRef::NameRef), | ||
352 | )) | ||
353 | .unwrap_or((None, None)) | ||
354 | }, | ||
355 | ast::RecordExprField(it) => { | ||
356 | cov_mark::hit!(expected_type_struct_field_with_leading_char); | ||
357 | self.sema | ||
358 | .resolve_record_field(&it) | ||
359 | .map(|f|( | ||
360 | Some(f.0.signature_ty(self.db)), | ||
361 | it.field_name().map(NameOrNameRef::NameRef), | ||
362 | )) | ||
363 | .unwrap_or((None, None)) | ||
364 | }, | ||
365 | ast::MatchExpr(it) => { | ||
366 | cov_mark::hit!(expected_type_match_arm_without_leading_char); | ||
367 | let ty = it.expr() | ||
368 | .and_then(|e| self.sema.type_of_expr(&e)); | ||
369 | |||
370 | (ty, None) | ||
371 | }, | ||
372 | ast::IdentPat(it) => { | ||
373 | cov_mark::hit!(expected_type_if_let_with_leading_char); | ||
374 | cov_mark::hit!(expected_type_match_arm_with_leading_char); | ||
375 | let ty = self.sema.type_of_pat(&ast::Pat::from(it)); | ||
376 | |||
377 | (ty, None) | ||
378 | }, | ||
379 | ast::Fn(_it) => { | ||
380 | cov_mark::hit!(expected_type_fn_ret_with_leading_char); | ||
381 | cov_mark::hit!(expected_type_fn_ret_without_leading_char); | ||
382 | let ty = self.token.ancestors() | ||
383 | .find_map(|ancestor| ast::Expr::cast(ancestor)) | ||
384 | .and_then(|expr| self.sema.type_of_expr(&expr)); | ||
385 | |||
386 | (ty, None) | ||
387 | }, | ||
388 | _ => { | ||
389 | match node.parent() { | ||
390 | Some(n) => { | ||
391 | node = n; | ||
392 | continue; | ||
393 | }, | ||
394 | None => (None, None), | ||
395 | } | ||
396 | }, | ||
397 | } | ||
398 | }; | ||
399 | } | ||
400 | }; | ||
401 | self.expected_type = expected_type; | 406 | self.expected_type = expected_type; |
402 | self.expected_name = expected_name; | 407 | self.expected_name = expected_name; |
403 | self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); | 408 | self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); |
@@ -802,6 +807,7 @@ fn foo() { | |||
802 | 807 | ||
803 | #[test] | 808 | #[test] |
804 | fn expected_type_if_let_without_leading_char() { | 809 | fn expected_type_if_let_without_leading_char() { |
810 | cov_mark::check!(expected_type_if_let_without_leading_char); | ||
805 | check_expected_type_and_name( | 811 | check_expected_type_and_name( |
806 | r#" | 812 | r#" |
807 | enum Foo { Bar, Baz, Quux } | 813 | enum Foo { Bar, Baz, Quux } |
@@ -811,8 +817,8 @@ fn foo() { | |||
811 | if let $0 = f { } | 817 | if let $0 = f { } |
812 | } | 818 | } |
813 | "#, | 819 | "#, |
814 | expect![[r#"ty: (), name: ?"#]], | 820 | expect![[r#"ty: Foo, name: ?"#]], |
815 | ) // FIXME should be `ty: u32, name: ?` | 821 | ) |
816 | } | 822 | } |
817 | 823 | ||
818 | #[test] | 824 | #[test] |
@@ -840,8 +846,8 @@ fn foo() -> u32 { | |||
840 | $0 | 846 | $0 |
841 | } | 847 | } |
842 | "#, | 848 | "#, |
843 | expect![[r#"ty: (), name: ?"#]], | 849 | expect![[r#"ty: u32, name: ?"#]], |
844 | ) // FIXME this should be `ty: u32, name: ?` | 850 | ) |
845 | } | 851 | } |
846 | 852 | ||
847 | #[test] | 853 | #[test] |
@@ -856,4 +862,16 @@ fn foo() -> u32 { | |||
856 | expect![[r#"ty: u32, name: ?"#]], | 862 | expect![[r#"ty: u32, name: ?"#]], |
857 | ) | 863 | ) |
858 | } | 864 | } |
865 | |||
866 | #[test] | ||
867 | fn expected_type_fn_ret_fn_ref_fully_typed() { | ||
868 | check_expected_type_and_name( | ||
869 | r#" | ||
870 | fn foo() -> u32 { | ||
871 | foo$0 | ||
872 | } | ||
873 | "#, | ||
874 | expect![[r#"ty: u32, name: ?"#]], | ||
875 | ) | ||
876 | } | ||
859 | } | 877 | } |
diff --git a/crates/ide_completion/src/render/enum_variant.rs b/crates/ide_completion/src/render/enum_variant.rs index 832f5ced1..0c0c71134 100644 --- a/crates/ide_completion/src/render/enum_variant.rs +++ b/crates/ide_completion/src/render/enum_variant.rs | |||
@@ -93,7 +93,7 @@ impl<'a> EnumRender<'a> { | |||
93 | .variant | 93 | .variant |
94 | .fields(self.ctx.db()) | 94 | .fields(self.ctx.db()) |
95 | .into_iter() | 95 | .into_iter() |
96 | .map(|field| (field.name(self.ctx.db()), field.signature_ty(self.ctx.db()))); | 96 | .map(|field| (field.name(self.ctx.db()), field.ty(self.ctx.db()))); |
97 | 97 | ||
98 | match self.variant_kind { | 98 | match self.variant_kind { |
99 | StructKind::Tuple | StructKind::Unit => format!( | 99 | StructKind::Tuple | StructKind::Unit => format!( |
diff --git a/crates/proc_macro_api/Cargo.toml b/crates/proc_macro_api/Cargo.toml index 1ba1e4abd..2ce5eeedd 100644 --- a/crates/proc_macro_api/Cargo.toml +++ b/crates/proc_macro_api/Cargo.toml | |||
@@ -15,7 +15,7 @@ serde_json = { version = "1.0", features = ["unbounded_depth"] } | |||
15 | log = "0.4.8" | 15 | log = "0.4.8" |
16 | crossbeam-channel = "0.5.0" | 16 | crossbeam-channel = "0.5.0" |
17 | jod-thread = "0.1.1" | 17 | jod-thread = "0.1.1" |
18 | memmap = "0.7.0" | 18 | memmap2 = "0.2.0" |
19 | object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "elf", "macho", "pe", "unaligned"] } | 19 | object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "elf", "macho", "pe", "unaligned"] } |
20 | snap = "1.0" | 20 | snap = "1.0" |
21 | 21 | ||
diff --git a/crates/proc_macro_api/src/version.rs b/crates/proc_macro_api/src/version.rs index dcf8fae8f..6dbac50b4 100644 --- a/crates/proc_macro_api/src/version.rs +++ b/crates/proc_macro_api/src/version.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | path::Path, | 6 | path::Path, |
7 | }; | 7 | }; |
8 | 8 | ||
9 | use memmap::Mmap; | 9 | use memmap2::Mmap; |
10 | use object::read::{File as BinaryFile, Object, ObjectSection}; | 10 | use object::read::{File as BinaryFile, Object, ObjectSection}; |
11 | use snap::read::FrameDecoder as SnapDecoder; | 11 | use snap::read::FrameDecoder as SnapDecoder; |
12 | 12 | ||
diff --git a/crates/rust-analyzer/src/benchmarks.rs b/crates/rust-analyzer/src/benchmarks.rs deleted file mode 100644 index bdd94b1c4..000000000 --- a/crates/rust-analyzer/src/benchmarks.rs +++ /dev/null | |||
@@ -1,74 +0,0 @@ | |||
1 | //! Fully integrated benchmarks for rust-analyzer, which load real cargo | ||
2 | //! projects. | ||
3 | //! | ||
4 | //! The benchmark here is used to debug specific performance regressions. If you | ||
5 | //! notice that, eg, completion is slow in some specific case, you can modify | ||
6 | //! code here exercise this specific completion, and thus have a fast | ||
7 | //! edit/compile/test cycle. | ||
8 | //! | ||
9 | //! Note that "Rust Analyzer: Run" action does not allow running a single test | ||
10 | //! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line" | ||
11 | //! which you can use to paste the command in terminal and add `--release` manually. | ||
12 | |||
13 | use std::sync::Arc; | ||
14 | |||
15 | use ide::Change; | ||
16 | use test_utils::project_root; | ||
17 | use vfs::{AbsPathBuf, VfsPath}; | ||
18 | |||
19 | use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig}; | ||
20 | |||
21 | #[test] | ||
22 | fn benchmark_integrated_highlighting() { | ||
23 | // Don't run slow benchmark by default | ||
24 | if true { | ||
25 | return; | ||
26 | } | ||
27 | |||
28 | // Load rust-analyzer itself. | ||
29 | let workspace_to_load = project_root(); | ||
30 | let file = "./crates/ide_db/src/apply_change.rs"; | ||
31 | |||
32 | let cargo_config = Default::default(); | ||
33 | let load_cargo_config = LoadCargoConfig { | ||
34 | load_out_dirs_from_check: true, | ||
35 | wrap_rustc: false, | ||
36 | with_proc_macro: false, | ||
37 | }; | ||
38 | |||
39 | let (mut host, vfs, _proc_macro) = { | ||
40 | let _it = stdx::timeit("workspace loading"); | ||
41 | load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() | ||
42 | }; | ||
43 | |||
44 | let file_id = { | ||
45 | let file = workspace_to_load.join(file); | ||
46 | let path = VfsPath::from(AbsPathBuf::assert(file)); | ||
47 | vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) | ||
48 | }; | ||
49 | |||
50 | { | ||
51 | let _it = stdx::timeit("initial"); | ||
52 | let analysis = host.analysis(); | ||
53 | analysis.highlight_as_html(file_id, false).unwrap(); | ||
54 | } | ||
55 | |||
56 | profile::init_from("*>100"); | ||
57 | // let _s = profile::heartbeat_span(); | ||
58 | |||
59 | { | ||
60 | let _it = stdx::timeit("change"); | ||
61 | let mut text = host.analysis().file_text(file_id).unwrap().to_string(); | ||
62 | text.push_str("\npub fn _dummy() {}\n"); | ||
63 | let mut change = Change::new(); | ||
64 | change.change_file(file_id, Some(Arc::new(text))); | ||
65 | host.apply_change(change); | ||
66 | } | ||
67 | |||
68 | { | ||
69 | let _it = stdx::timeit("after change"); | ||
70 | let _span = profile::cpu_span(); | ||
71 | let analysis = host.analysis(); | ||
72 | analysis.highlight_as_html(file_id, false).unwrap(); | ||
73 | } | ||
74 | } | ||
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 74f784338..c33c8179c 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs | |||
@@ -7,7 +7,7 @@ use anyhow::anyhow; | |||
7 | use rustc_hash::FxHashSet; | 7 | use rustc_hash::FxHashSet; |
8 | 8 | ||
9 | use hir::{db::HirDatabase, Crate, Module}; | 9 | use hir::{db::HirDatabase, Crate, Module}; |
10 | use ide::{DiagnosticsConfig, Severity}; | 10 | use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity}; |
11 | use ide_db::base_db::SourceDatabaseExt; | 11 | use ide_db::base_db::SourceDatabaseExt; |
12 | 12 | ||
13 | use crate::cli::{ | 13 | use crate::cli::{ |
@@ -57,8 +57,9 @@ pub fn diagnostics( | |||
57 | let crate_name = | 57 | let crate_name = |
58 | module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); | 58 | module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); |
59 | println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); | 59 | println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); |
60 | for diagnostic in | 60 | for diagnostic in analysis |
61 | analysis.diagnostics(&DiagnosticsConfig::default(), false, file_id).unwrap() | 61 | .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::None, file_id) |
62 | .unwrap() | ||
62 | { | 63 | { |
63 | if matches!(diagnostic.severity, Severity::Error) { | 64 | if matches!(diagnostic.severity, Severity::Error) { |
64 | found_error = true; | 65 | found_error = true; |
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index adeb7a97e..6f2f482c1 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs | |||
@@ -84,6 +84,7 @@ pub(crate) struct GlobalState { | |||
84 | pub(crate) workspace_build_data: Option<BuildDataResult>, | 84 | pub(crate) workspace_build_data: Option<BuildDataResult>, |
85 | pub(crate) fetch_build_data_queue: | 85 | pub(crate) fetch_build_data_queue: |
86 | OpQueue<BuildDataCollector, Option<anyhow::Result<BuildDataResult>>>, | 86 | OpQueue<BuildDataCollector, Option<anyhow::Result<BuildDataResult>>>, |
87 | pub(crate) prime_caches_queue: OpQueue<(), ()>, | ||
87 | 88 | ||
88 | latest_requests: Arc<RwLock<LatestRequests>>, | 89 | latest_requests: Arc<RwLock<LatestRequests>>, |
89 | } | 90 | } |
@@ -146,6 +147,7 @@ impl GlobalState { | |||
146 | workspaces: Arc::new(Vec::new()), | 147 | workspaces: Arc::new(Vec::new()), |
147 | fetch_workspaces_queue: OpQueue::default(), | 148 | fetch_workspaces_queue: OpQueue::default(), |
148 | workspace_build_data: None, | 149 | workspace_build_data: None, |
150 | prime_caches_queue: OpQueue::default(), | ||
149 | 151 | ||
150 | fetch_build_data_queue: OpQueue::default(), | 152 | fetch_build_data_queue: OpQueue::default(), |
151 | latest_requests: Default::default(), | 153 | latest_requests: Default::default(), |
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 1f59402e5..f6e40f872 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -8,8 +8,9 @@ use std::{ | |||
8 | }; | 8 | }; |
9 | 9 | ||
10 | use ide::{ | 10 | use ide::{ |
11 | AnnotationConfig, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, Query, | 11 | AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange, |
12 | RangeInfo, Runnable, RunnableKind, SearchScope, SourceChange, TextEdit, | 12 | HoverAction, HoverGotoTypeData, Query, RangeInfo, Runnable, RunnableKind, SearchScope, |
13 | SingleResolve, SourceChange, TextEdit, | ||
13 | }; | 14 | }; |
14 | use ide_db::SymbolKind; | 15 | use ide_db::SymbolKind; |
15 | use itertools::Itertools; | 16 | use itertools::Itertools; |
@@ -27,7 +28,7 @@ use lsp_types::{ | |||
27 | use project_model::TargetKind; | 28 | use project_model::TargetKind; |
28 | use serde::{Deserialize, Serialize}; | 29 | use serde::{Deserialize, Serialize}; |
29 | use serde_json::to_value; | 30 | use serde_json::to_value; |
30 | use stdx::{format_to, split_once}; | 31 | use stdx::format_to; |
31 | use syntax::{algo, ast, AstNode, TextRange, TextSize}; | 32 | use syntax::{algo, ast, AstNode, TextRange, TextSize}; |
32 | 33 | ||
33 | use crate::{ | 34 | use crate::{ |
@@ -1004,10 +1005,15 @@ pub(crate) fn handle_code_action( | |||
1004 | let mut res: Vec<lsp_ext::CodeAction> = Vec::new(); | 1005 | let mut res: Vec<lsp_ext::CodeAction> = Vec::new(); |
1005 | 1006 | ||
1006 | let code_action_resolve_cap = snap.config.code_action_resolve(); | 1007 | let code_action_resolve_cap = snap.config.code_action_resolve(); |
1008 | let resolve = if code_action_resolve_cap { | ||
1009 | AssistResolveStrategy::None | ||
1010 | } else { | ||
1011 | AssistResolveStrategy::All | ||
1012 | }; | ||
1007 | let assists = snap.analysis.assists_with_fixes( | 1013 | let assists = snap.analysis.assists_with_fixes( |
1008 | &assists_config, | 1014 | &assists_config, |
1009 | &snap.config.diagnostics(), | 1015 | &snap.config.diagnostics(), |
1010 | !code_action_resolve_cap, | 1016 | resolve, |
1011 | frange, | 1017 | frange, |
1012 | )?; | 1018 | )?; |
1013 | for (index, assist) in assists.into_iter().enumerate() { | 1019 | for (index, assist) in assists.into_iter().enumerate() { |
@@ -1052,22 +1058,68 @@ pub(crate) fn handle_code_action_resolve( | |||
1052 | .only | 1058 | .only |
1053 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); | 1059 | .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()); |
1054 | 1060 | ||
1061 | let (assist_index, assist_resolve) = match parse_action_id(¶ms.id) { | ||
1062 | Ok(parsed_data) => parsed_data, | ||
1063 | Err(e) => { | ||
1064 | return Err(LspError::new( | ||
1065 | ErrorCode::InvalidParams as i32, | ||
1066 | format!("Failed to parse action id string '{}': {}", params.id, e), | ||
1067 | ) | ||
1068 | .into()) | ||
1069 | } | ||
1070 | }; | ||
1071 | |||
1072 | let expected_assist_id = assist_resolve.assist_id.clone(); | ||
1073 | let expected_kind = assist_resolve.assist_kind; | ||
1074 | |||
1055 | let assists = snap.analysis.assists_with_fixes( | 1075 | let assists = snap.analysis.assists_with_fixes( |
1056 | &assists_config, | 1076 | &assists_config, |
1057 | &snap.config.diagnostics(), | 1077 | &snap.config.diagnostics(), |
1058 | true, | 1078 | AssistResolveStrategy::Single(assist_resolve), |
1059 | frange, | 1079 | frange, |
1060 | )?; | 1080 | )?; |
1061 | 1081 | ||
1062 | let (id, index) = split_once(¶ms.id, ':').unwrap(); | 1082 | let assist = match assists.get(assist_index) { |
1063 | let index = index.parse::<usize>().unwrap(); | 1083 | Some(assist) => assist, |
1064 | let assist = &assists[index]; | 1084 | None => return Err(LspError::new( |
1065 | assert!(assist.id.0 == id); | 1085 | ErrorCode::InvalidParams as i32, |
1086 | format!( | ||
1087 | "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}", | ||
1088 | assist_index, params.id, | ||
1089 | ), | ||
1090 | ) | ||
1091 | .into()) | ||
1092 | }; | ||
1093 | if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind { | ||
1094 | return Err(LspError::new( | ||
1095 | ErrorCode::InvalidParams as i32, | ||
1096 | format!( | ||
1097 | "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.", | ||
1098 | assist_index, params.id, assist.id | ||
1099 | ), | ||
1100 | ) | ||
1101 | .into()); | ||
1102 | } | ||
1066 | let edit = to_proto::code_action(&snap, assist.clone(), None)?.edit; | 1103 | let edit = to_proto::code_action(&snap, assist.clone(), None)?.edit; |
1067 | code_action.edit = edit; | 1104 | code_action.edit = edit; |
1068 | Ok(code_action) | 1105 | Ok(code_action) |
1069 | } | 1106 | } |
1070 | 1107 | ||
1108 | fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> { | ||
1109 | let id_parts = action_id.split(':').collect_vec(); | ||
1110 | match id_parts.as_slice() { | ||
1111 | &[assist_id_string, assist_kind_string, index_string] => { | ||
1112 | let assist_kind: AssistKind = assist_kind_string.parse()?; | ||
1113 | let index: usize = match index_string.parse() { | ||
1114 | Ok(index) => index, | ||
1115 | Err(e) => return Err(format!("Incorrect index string: {}", e)), | ||
1116 | }; | ||
1117 | Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind })) | ||
1118 | } | ||
1119 | _ => Err("Action id contains incorrect number of segments".to_string()), | ||
1120 | } | ||
1121 | } | ||
1122 | |||
1071 | pub(crate) fn handle_code_lens( | 1123 | pub(crate) fn handle_code_lens( |
1072 | snap: GlobalStateSnapshot, | 1124 | snap: GlobalStateSnapshot, |
1073 | params: lsp_types::CodeLensParams, | 1125 | params: lsp_types::CodeLensParams, |
@@ -1182,7 +1234,7 @@ pub(crate) fn publish_diagnostics( | |||
1182 | 1234 | ||
1183 | let diagnostics: Vec<Diagnostic> = snap | 1235 | let diagnostics: Vec<Diagnostic> = snap |
1184 | .analysis | 1236 | .analysis |
1185 | .diagnostics(&snap.config.diagnostics(), false, file_id)? | 1237 | .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)? |
1186 | .into_iter() | 1238 | .into_iter() |
1187 | .map(|d| Diagnostic { | 1239 | .map(|d| Diagnostic { |
1188 | range: to_proto::range(&line_index, d.range), | 1240 | range: to_proto::range(&line_index, d.range), |
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs new file mode 100644 index 000000000..3dcbe397a --- /dev/null +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs | |||
@@ -0,0 +1,184 @@ | |||
1 | //! Fully integrated benchmarks for rust-analyzer, which load real cargo | ||
2 | //! projects. | ||
3 | //! | ||
4 | //! The benchmark here is used to debug specific performance regressions. If you | ||
5 | //! notice that, eg, completion is slow in some specific case, you can modify | ||
6 | //! code here exercise this specific completion, and thus have a fast | ||
7 | //! edit/compile/test cycle. | ||
8 | //! | ||
9 | //! Note that "Rust Analyzer: Run" action does not allow running a single test | ||
10 | //! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line" | ||
11 | //! which you can use to paste the command in terminal and add `--release` manually. | ||
12 | |||
13 | use std::{convert::TryFrom, sync::Arc}; | ||
14 | |||
15 | use ide::{Change, CompletionConfig, FilePosition, TextSize}; | ||
16 | use ide_db::helpers::{insert_use::InsertUseConfig, merge_imports::MergeBehavior, SnippetCap}; | ||
17 | use test_utils::project_root; | ||
18 | use vfs::{AbsPathBuf, VfsPath}; | ||
19 | |||
20 | use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig}; | ||
21 | |||
22 | #[test] | ||
23 | fn integrated_highlighting_benchmark() { | ||
24 | if std::env::var("RUN_SLOW_BENCHES").is_err() { | ||
25 | return; | ||
26 | } | ||
27 | |||
28 | // Load rust-analyzer itself. | ||
29 | let workspace_to_load = project_root(); | ||
30 | let file = "./crates/ide_db/src/apply_change.rs"; | ||
31 | |||
32 | let cargo_config = Default::default(); | ||
33 | let load_cargo_config = LoadCargoConfig { | ||
34 | load_out_dirs_from_check: true, | ||
35 | wrap_rustc: false, | ||
36 | with_proc_macro: false, | ||
37 | }; | ||
38 | |||
39 | let (mut host, vfs, _proc_macro) = { | ||
40 | let _it = stdx::timeit("workspace loading"); | ||
41 | load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() | ||
42 | }; | ||
43 | |||
44 | let file_id = { | ||
45 | let file = workspace_to_load.join(file); | ||
46 | let path = VfsPath::from(AbsPathBuf::assert(file)); | ||
47 | vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) | ||
48 | }; | ||
49 | |||
50 | { | ||
51 | let _it = stdx::timeit("initial"); | ||
52 | let analysis = host.analysis(); | ||
53 | analysis.highlight_as_html(file_id, false).unwrap(); | ||
54 | } | ||
55 | |||
56 | profile::init_from("*>100"); | ||
57 | // let _s = profile::heartbeat_span(); | ||
58 | |||
59 | { | ||
60 | let _it = stdx::timeit("change"); | ||
61 | let mut text = host.analysis().file_text(file_id).unwrap().to_string(); | ||
62 | text.push_str("\npub fn _dummy() {}\n"); | ||
63 | let mut change = Change::new(); | ||
64 | change.change_file(file_id, Some(Arc::new(text))); | ||
65 | host.apply_change(change); | ||
66 | } | ||
67 | |||
68 | { | ||
69 | let _it = stdx::timeit("after change"); | ||
70 | let _span = profile::cpu_span(); | ||
71 | let analysis = host.analysis(); | ||
72 | analysis.highlight_as_html(file_id, false).unwrap(); | ||
73 | } | ||
74 | } | ||
75 | |||
76 | #[test] | ||
77 | fn integrated_completion_benchmark() { | ||
78 | if std::env::var("RUN_SLOW_BENCHES").is_err() { | ||
79 | return; | ||
80 | } | ||
81 | |||
82 | // Load rust-analyzer itself. | ||
83 | let workspace_to_load = project_root(); | ||
84 | let file = "./crates/hir/src/lib.rs"; | ||
85 | |||
86 | let cargo_config = Default::default(); | ||
87 | let load_cargo_config = LoadCargoConfig { | ||
88 | load_out_dirs_from_check: true, | ||
89 | wrap_rustc: false, | ||
90 | with_proc_macro: false, | ||
91 | }; | ||
92 | |||
93 | let (mut host, vfs, _proc_macro) = { | ||
94 | let _it = stdx::timeit("workspace loading"); | ||
95 | load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() | ||
96 | }; | ||
97 | |||
98 | let file_id = { | ||
99 | let file = workspace_to_load.join(file); | ||
100 | let path = VfsPath::from(AbsPathBuf::assert(file)); | ||
101 | vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) | ||
102 | }; | ||
103 | |||
104 | { | ||
105 | let _it = stdx::timeit("initial"); | ||
106 | let analysis = host.analysis(); | ||
107 | analysis.highlight_as_html(file_id, false).unwrap(); | ||
108 | } | ||
109 | |||
110 | profile::init_from("*>5"); | ||
111 | // let _s = profile::heartbeat_span(); | ||
112 | |||
113 | let completion_offset = { | ||
114 | let _it = stdx::timeit("change"); | ||
115 | let mut text = host.analysis().file_text(file_id).unwrap().to_string(); | ||
116 | let completion_offset = | ||
117 | patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)") | ||
118 | + "sel".len(); | ||
119 | let mut change = Change::new(); | ||
120 | change.change_file(file_id, Some(Arc::new(text))); | ||
121 | host.apply_change(change); | ||
122 | completion_offset | ||
123 | }; | ||
124 | |||
125 | { | ||
126 | let _it = stdx::timeit("unqualified path completion"); | ||
127 | let _span = profile::cpu_span(); | ||
128 | let analysis = host.analysis(); | ||
129 | let config = CompletionConfig { | ||
130 | enable_postfix_completions: true, | ||
131 | enable_imports_on_the_fly: true, | ||
132 | add_call_parenthesis: true, | ||
133 | add_call_argument_snippets: true, | ||
134 | snippet_cap: SnippetCap::new(true), | ||
135 | insert_use: InsertUseConfig { | ||
136 | merge: Some(MergeBehavior::Full), | ||
137 | prefix_kind: hir::PrefixKind::ByCrate, | ||
138 | group: true, | ||
139 | }, | ||
140 | }; | ||
141 | let position = | ||
142 | FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; | ||
143 | analysis.completions(&config, position).unwrap(); | ||
144 | } | ||
145 | |||
146 | let completion_offset = { | ||
147 | let _it = stdx::timeit("change"); | ||
148 | let mut text = host.analysis().file_text(file_id).unwrap().to_string(); | ||
149 | let completion_offset = | ||
150 | patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)") | ||
151 | + "self.".len(); | ||
152 | let mut change = Change::new(); | ||
153 | change.change_file(file_id, Some(Arc::new(text))); | ||
154 | host.apply_change(change); | ||
155 | completion_offset | ||
156 | }; | ||
157 | |||
158 | { | ||
159 | let _it = stdx::timeit("dot completion"); | ||
160 | let _span = profile::cpu_span(); | ||
161 | let analysis = host.analysis(); | ||
162 | let config = CompletionConfig { | ||
163 | enable_postfix_completions: true, | ||
164 | enable_imports_on_the_fly: true, | ||
165 | add_call_parenthesis: true, | ||
166 | add_call_argument_snippets: true, | ||
167 | snippet_cap: SnippetCap::new(true), | ||
168 | insert_use: InsertUseConfig { | ||
169 | merge: Some(MergeBehavior::Full), | ||
170 | prefix_kind: hir::PrefixKind::ByCrate, | ||
171 | group: true, | ||
172 | }, | ||
173 | }; | ||
174 | let position = | ||
175 | FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; | ||
176 | analysis.completions(&config, position).unwrap(); | ||
177 | } | ||
178 | } | ||
179 | |||
180 | fn patch(what: &mut String, from: &str, to: &str) -> usize { | ||
181 | let idx = what.find(from).unwrap(); | ||
182 | *what = what.replacen(from, to, 1); | ||
183 | idx | ||
184 | } | ||
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index d9a5030a0..da7e24bec 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs | |||
@@ -40,7 +40,7 @@ pub mod lsp_ext; | |||
40 | pub mod config; | 40 | pub mod config; |
41 | 41 | ||
42 | #[cfg(test)] | 42 | #[cfg(test)] |
43 | mod benchmarks; | 43 | mod integrated_benchmarks; |
44 | 44 | ||
45 | use serde::de::DeserializeOwned; | 45 | use serde::de::DeserializeOwned; |
46 | use std::fmt; | 46 | use std::fmt; |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index a766aacad..ce7ece559 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -8,8 +8,7 @@ use std::{ | |||
8 | 8 | ||
9 | use always_assert::always; | 9 | use always_assert::always; |
10 | use crossbeam_channel::{select, Receiver}; | 10 | use crossbeam_channel::{select, Receiver}; |
11 | use ide::PrimeCachesProgress; | 11 | use ide::{FileId, PrimeCachesProgress}; |
12 | use ide::{Canceled, FileId}; | ||
13 | use ide_db::base_db::VfsPath; | 12 | use ide_db::base_db::VfsPath; |
14 | use lsp_server::{Connection, Notification, Request, Response}; | 13 | use lsp_server::{Connection, Notification, Request, Response}; |
15 | use lsp_types::notification::Notification as _; | 14 | use lsp_types::notification::Notification as _; |
@@ -295,6 +294,8 @@ impl GlobalState { | |||
295 | state = Progress::End; | 294 | state = Progress::End; |
296 | message = None; | 295 | message = None; |
297 | fraction = 1.0; | 296 | fraction = 1.0; |
297 | |||
298 | self.prime_caches_queue.op_completed(()); | ||
298 | } | 299 | } |
299 | }; | 300 | }; |
300 | 301 | ||
@@ -711,18 +712,23 @@ impl GlobalState { | |||
711 | } | 712 | } |
712 | fn update_file_notifications_on_threadpool(&mut self) { | 713 | fn update_file_notifications_on_threadpool(&mut self) { |
713 | self.maybe_update_diagnostics(); | 714 | self.maybe_update_diagnostics(); |
715 | |||
716 | // Ensure that only one cache priming task can run at a time | ||
717 | self.prime_caches_queue.request_op(()); | ||
718 | if self.prime_caches_queue.should_start_op().is_none() { | ||
719 | return; | ||
720 | } | ||
721 | |||
714 | self.task_pool.handle.spawn_with_sender({ | 722 | self.task_pool.handle.spawn_with_sender({ |
715 | let snap = self.snapshot(); | 723 | let snap = self.snapshot(); |
716 | move |sender| { | 724 | move |sender| { |
717 | snap.analysis | 725 | let cb = |progress| { |
718 | .prime_caches(|progress| { | 726 | sender.send(Task::PrimeCaches(progress)).unwrap(); |
719 | sender.send(Task::PrimeCaches(progress)).unwrap(); | 727 | }; |
720 | }) | 728 | match snap.analysis.prime_caches(cb) { |
721 | .unwrap_or_else(|_: Canceled| { | 729 | Ok(()) => (), |
722 | // Pretend that we're done, so that the progress bar is removed. Otherwise | 730 | Err(_canceled) => (), |
723 | // the editor may complain about it already existing. | 731 | } |
724 | sender.send(Task::PrimeCaches(PrimeCachesProgress::Finished)).unwrap() | ||
725 | }); | ||
726 | } | 732 | } |
727 | }); | 733 | }); |
728 | } | 734 | } |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 5eff036ec..ecf6fd12f 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -898,7 +898,7 @@ pub(crate) fn code_action( | |||
898 | (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?), | 898 | (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?), |
899 | (None, Some((index, code_action_params))) => { | 899 | (None, Some((index, code_action_params))) => { |
900 | res.data = Some(lsp_ext::CodeActionData { | 900 | res.data = Some(lsp_ext::CodeActionData { |
901 | id: format!("{}:{}", assist.id.0, index.to_string()), | 901 | id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index), |
902 | code_action_params, | 902 | code_action_params, |
903 | }); | 903 | }); |
904 | } | 904 | } |