diff options
124 files changed, 4286 insertions, 2535 deletions
diff --git a/Cargo.lock b/Cargo.lock
index ca519a474..15ccf4146 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -636,6 +636,7 @@ dependencies = [ | |||
636 | "ide_db", | 636 | "ide_db", |
637 | "itertools", | 637 | "itertools", |
638 | "log", | 638 | "log", |
639 | "once_cell", | ||
639 | "profile", | 640 | "profile", |
640 | "rustc-hash", | 641 | "rustc-hash", |
641 | "stdx", | 642 | "stdx", |
diff --git a/crates/base_db/src/fixture.rs b/crates/base_db/src/fixture.rs
index 0132565e4..69ceba735 100644
--- a/crates/base_db/src/fixture.rs
+++ b/crates/base_db/src/fixture.rs
@@ -34,19 +34,13 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static { | |||
34 | 34 | ||
35 | fn with_position(ra_fixture: &str) -> (Self, FilePosition) { | 35 | fn with_position(ra_fixture: &str) -> (Self, FilePosition) { |
36 | let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); | 36 | let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); |
37 | let offset = match range_or_offset { | 37 | let offset = range_or_offset.expect_offset(); |
38 | RangeOrOffset::Range(_) => panic!("Expected a cursor position, got a range instead"), | ||
39 | RangeOrOffset::Offset(it) => it, | ||
40 | }; | ||
41 | (db, FilePosition { file_id, offset }) | 38 | (db, FilePosition { file_id, offset }) |
42 | } | 39 | } |
43 | 40 | ||
44 | fn with_range(ra_fixture: &str) -> (Self, FileRange) { | 41 | fn with_range(ra_fixture: &str) -> (Self, FileRange) { |
45 | let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); | 42 | let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); |
46 | let range = match range_or_offset { | 43 | let range = range_or_offset.expect_range(); |
47 | RangeOrOffset::Range(it) => it, | ||
48 | RangeOrOffset::Offset(_) => panic!("Expected a cursor range, got a position instead"), | ||
49 | }; | ||
50 | (db, FileRange { file_id, range }) | 44 | (db, FileRange { file_id, range }) |
51 | } | 45 | } |
52 | 46 | ||
diff --git a/crates/base_db/src/input.rs b/crates/base_db/src/input.rs
index 0ef77ef5d..23cb0c839 100644
--- a/crates/base_db/src/input.rs
+++ b/crates/base_db/src/input.rs
@@ -53,11 +53,15 @@ impl SourceRoot { | |||
53 | } | 53 | } |
54 | 54 | ||
55 | /// `CrateGraph` is a bit of information which turns a set of text files into a | 55 | /// `CrateGraph` is a bit of information which turns a set of text files into a |
56 | /// number of Rust crates. Each crate is defined by the `FileId` of its root module, | 56 | /// number of Rust crates. |
57 | /// the set of cfg flags (not yet implemented) and the set of dependencies. Note | 57 | /// |
58 | /// that, due to cfg's, there might be several crates for a single `FileId`! As | 58 | /// Each crate is defined by the `FileId` of its root module, the set of enabled |
59 | /// in the rust-lang proper, a crate does not have a name. Instead, names are | 59 | /// `cfg` flags and the set of dependencies. |
60 | /// specified on dependency edges. That is, a crate might be known under | 60 | /// |
61 | /// Note that, due to cfg's, there might be several crates for a single `FileId`! | ||
62 | /// | ||
63 | /// For the purposes of analysis, a crate does not have a name. Instead, names | ||
64 | /// are specified on dependency edges. That is, a crate might be known under | ||
61 | /// different names in different dependent crates. | 65 | /// different names in different dependent crates. |
62 | /// | 66 | /// |
63 | /// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust | 67 | /// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust |
@@ -143,7 +147,7 @@ impl CrateDisplayName { | |||
143 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] | 147 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] |
144 | pub struct ProcMacroId(pub u32); | 148 | pub struct ProcMacroId(pub u32); |
145 | 149 | ||
146 | #[derive(Copy, Clone, Eq, PartialEq, Debug)] | 150 | #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] |
147 | pub enum ProcMacroKind { | 151 | pub enum ProcMacroKind { |
148 | CustomDerive, | 152 | CustomDerive, |
149 | FuncLike, | 153 | FuncLike, |
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 414c3f35e..22ec7c6ac 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -3,13 +3,227 @@ | |||
3 | //! | 3 | //! |
4 | //! This probably isn't the best way to do this -- ideally, diagnistics should | 4 | //! This probably isn't the best way to do this -- ideally, diagnistics should |
5 | //! be expressed in terms of hir types themselves. | 5 | //! be expressed in terms of hir types themselves. |
6 | pub use hir_def::diagnostics::{ | 6 | use std::any::Any; |
7 | InactiveCode, UnresolvedMacroCall, UnresolvedModule, UnresolvedProcMacro, | 7 | |
8 | }; | 8 | use cfg::{CfgExpr, CfgOptions, DnfExpr}; |
9 | pub use hir_expand::diagnostics::{ | 9 | use hir_def::path::ModPath; |
10 | Diagnostic, DiagnosticCode, DiagnosticSink, DiagnosticSinkBuilder, | 10 | use hir_expand::{HirFileId, InFile}; |
11 | }; | 11 | use stdx::format_to; |
12 | pub use hir_ty::diagnostics::{ | 12 | use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; |
13 | IncorrectCase, MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkOrSomeInTailExpr, | 13 | |
14 | NoSuchField, RemoveThisSemicolon, ReplaceFilterMapNextWithFindMap, | 14 | pub use hir_ty::{ |
15 | diagnostics::{ | ||
16 | IncorrectCase, MismatchedArgCount, MissingFields, MissingMatchArms, | ||
17 | MissingOkOrSomeInTailExpr, NoSuchField, RemoveThisSemicolon, | ||
18 | ReplaceFilterMapNextWithFindMap, | ||
19 | }, | ||
20 | diagnostics_sink::{Diagnostic, DiagnosticCode, DiagnosticSink, DiagnosticSinkBuilder}, | ||
15 | }; | 21 | }; |
22 | |||
23 | // Diagnostic: unresolved-module | ||
24 | // | ||
25 | // This diagnostic is triggered if rust-analyzer is unable to discover referred module. | ||
26 | #[derive(Debug)] | ||
27 | pub struct UnresolvedModule { | ||
28 | pub file: HirFileId, | ||
29 | pub decl: AstPtr<ast::Module>, | ||
30 | pub candidate: String, | ||
31 | } | ||
32 | |||
33 | impl Diagnostic for UnresolvedModule { | ||
34 | fn code(&self) -> DiagnosticCode { | ||
35 | DiagnosticCode("unresolved-module") | ||
36 | } | ||
37 | fn message(&self) -> String { | ||
38 | "unresolved module".to_string() | ||
39 | } | ||
40 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
41 | InFile::new(self.file, self.decl.clone().into()) | ||
42 | } | ||
43 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
44 | self | ||
45 | } | ||
46 | } | ||
47 | |||
48 | // Diagnostic: unresolved-extern-crate | ||
49 | // | ||
50 | // This diagnostic is triggered if rust-analyzer is unable to discover referred extern crate. | ||
51 | #[derive(Debug)] | ||
52 | pub struct UnresolvedExternCrate { | ||
53 | pub file: HirFileId, | ||
54 | pub item: AstPtr<ast::ExternCrate>, | ||
55 | } | ||
56 | |||
57 | impl Diagnostic for UnresolvedExternCrate { | ||
58 | fn code(&self) -> DiagnosticCode { | ||
59 | DiagnosticCode("unresolved-extern-crate") | ||
60 | } | ||
61 | fn message(&self) -> String { | ||
62 | "unresolved extern crate".to_string() | ||
63 | } | ||
64 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
65 | InFile::new(self.file, self.item.clone().into()) | ||
66 | } | ||
67 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
68 | self | ||
69 | } | ||
70 | } | ||
71 | |||
72 | #[derive(Debug)] | ||
73 | pub struct UnresolvedImport { | ||
74 | pub file: HirFileId, | ||
75 | pub node: AstPtr<ast::UseTree>, | ||
76 | } | ||
77 | |||
78 | impl Diagnostic for UnresolvedImport { | ||
79 | fn code(&self) -> DiagnosticCode { | ||
80 | DiagnosticCode("unresolved-import") | ||
81 | } | ||
82 | fn message(&self) -> String { | ||
83 | "unresolved import".to_string() | ||
84 | } | ||
85 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
86 | InFile::new(self.file, self.node.clone().into()) | ||
87 | } | ||
88 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
89 | self | ||
90 | } | ||
91 | fn is_experimental(&self) -> bool { | ||
92 | // This currently results in false positives in the following cases: | ||
93 | // - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly) | ||
94 | // - `core::arch` (we don't handle `#[path = "../<path>"]` correctly) | ||
95 | // - proc macros and/or proc macro generated code | ||
96 | true | ||
97 | } | ||
98 | } | ||
99 | |||
100 | // Diagnostic: unresolved-macro-call | ||
101 | // | ||
102 | // This diagnostic is triggered if rust-analyzer is unable to resolve the path to a | ||
103 | // macro in a macro invocation. | ||
104 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
105 | pub struct UnresolvedMacroCall { | ||
106 | pub file: HirFileId, | ||
107 | pub node: AstPtr<ast::MacroCall>, | ||
108 | pub path: ModPath, | ||
109 | } | ||
110 | |||
111 | impl Diagnostic for UnresolvedMacroCall { | ||
112 | fn code(&self) -> DiagnosticCode { | ||
113 | DiagnosticCode("unresolved-macro-call") | ||
114 | } | ||
115 | fn message(&self) -> String { | ||
116 | format!("unresolved macro `{}!`", self.path) | ||
117 | } | ||
118 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
119 | InFile::new(self.file, self.node.clone().into()) | ||
120 | } | ||
121 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
122 | self | ||
123 | } | ||
124 | fn is_experimental(&self) -> bool { | ||
125 | true | ||
126 | } | ||
127 | } | ||
128 | |||
129 | // Diagnostic: inactive-code | ||
130 | // | ||
131 | // This diagnostic is shown for code with inactive `#[cfg]` attributes. | ||
132 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
133 | pub struct InactiveCode { | ||
134 | pub file: HirFileId, | ||
135 | pub node: SyntaxNodePtr, | ||
136 | pub cfg: CfgExpr, | ||
137 | pub opts: CfgOptions, | ||
138 | } | ||
139 | |||
140 | impl Diagnostic for InactiveCode { | ||
141 | fn code(&self) -> DiagnosticCode { | ||
142 | DiagnosticCode("inactive-code") | ||
143 | } | ||
144 | fn message(&self) -> String { | ||
145 | let inactive = DnfExpr::new(self.cfg.clone()).why_inactive(&self.opts); | ||
146 | let mut buf = "code is inactive due to #[cfg] directives".to_string(); | ||
147 | |||
148 | if let Some(inactive) = inactive { | ||
149 | format_to!(buf, ": {}", inactive); | ||
150 | } | ||
151 | |||
152 | buf | ||
153 | } | ||
154 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
155 | InFile::new(self.file, self.node.clone()) | ||
156 | } | ||
157 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
158 | self | ||
159 | } | ||
160 | } | ||
161 | |||
162 | // Diagnostic: unresolved-proc-macro | ||
163 | // | ||
164 | // This diagnostic is shown when a procedural macro can not be found. This usually means that | ||
165 | // procedural macro support is simply disabled (and hence is only a weak hint instead of an error), | ||
166 | // but can also indicate project setup problems. | ||
167 | // | ||
168 | // If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the | ||
169 | // `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can | ||
170 | // enable support for procedural macros (see `rust-analyzer.procMacro.enable`). | ||
171 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
172 | pub struct UnresolvedProcMacro { | ||
173 | pub file: HirFileId, | ||
174 | pub node: SyntaxNodePtr, | ||
175 | /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange` | ||
176 | /// to use instead. | ||
177 | pub precise_location: Option<TextRange>, | ||
178 | pub macro_name: Option<String>, | ||
179 | } | ||
180 | |||
181 | impl Diagnostic for UnresolvedProcMacro { | ||
182 | fn code(&self) -> DiagnosticCode { | ||
183 | DiagnosticCode("unresolved-proc-macro") | ||
184 | } | ||
185 | |||
186 | fn message(&self) -> String { | ||
187 | match &self.macro_name { | ||
188 | Some(name) => format!("proc macro `{}` not expanded", name), | ||
189 | None => "proc macro not expanded".to_string(), | ||
190 | } | ||
191 | } | ||
192 | |||
193 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
194 | InFile::new(self.file, self.node.clone()) | ||
195 | } | ||
196 | |||
197 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
198 | self | ||
199 | } | ||
200 | } | ||
201 | |||
202 | // Diagnostic: macro-error | ||
203 | // | ||
204 | // This diagnostic is shown for macro expansion errors. | ||
205 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
206 | pub struct MacroError { | ||
207 | pub file: HirFileId, | ||
208 | pub node: SyntaxNodePtr, | ||
209 | pub message: String, | ||
210 | } | ||
211 | |||
212 | impl Diagnostic for MacroError { | ||
213 | fn code(&self) -> DiagnosticCode { | ||
214 | DiagnosticCode("macro-error") | ||
215 | } | ||
216 | fn message(&self) -> String { | ||
217 | self.message.clone() | ||
218 | } | ||
219 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
220 | InFile::new(self.file, self.node.clone()) | ||
221 | } | ||
222 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
223 | self | ||
224 | } | ||
225 | fn is_experimental(&self) -> bool { | ||
226 | // Newly added and not very well-tested, might contain false positives. | ||
227 | true | ||
228 | } | ||
229 | } | ||
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 508ac37c2..72f0d9b5f 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -92,7 +92,7 @@ impl HirDisplay for Function { | |||
92 | &data.ret_type | 92 | &data.ret_type |
93 | } else { | 93 | } else { |
94 | match &*data.ret_type { | 94 | match &*data.ret_type { |
95 | TypeRef::ImplTrait(bounds) => match &bounds[0] { | 95 | TypeRef::ImplTrait(bounds) => match bounds[0].as_ref() { |
96 | TypeBound::Path(path) => { | 96 | TypeBound::Path(path) => { |
97 | path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings | 97 | path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings |
98 | [0] | 98 | [0] |
@@ -427,10 +427,6 @@ impl HirDisplay for Trait { | |||
427 | write!(f, "trait {}", data.name)?; | 427 | write!(f, "trait {}", data.name)?; |
428 | let def_id = GenericDefId::TraitId(self.id); | 428 | let def_id = GenericDefId::TraitId(self.id); |
429 | write_generic_params(def_id, f)?; | 429 | write_generic_params(def_id, f)?; |
430 | if !data.bounds.is_empty() { | ||
431 | write!(f, ": ")?; | ||
432 | f.write_joined(&*data.bounds, " + ")?; | ||
433 | } | ||
434 | write_where_clause(def_id, f)?; | 430 | write_where_clause(def_id, f)?; |
435 | Ok(()) | 431 | Ok(()) |
436 | } | 432 | } |
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index ca9a7f7fa..975ae4869 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -35,12 +35,18 @@ use std::{iter, sync::Arc}; | |||
35 | 35 | ||
36 | use arrayvec::ArrayVec; | 36 | use arrayvec::ArrayVec; |
37 | use base_db::{CrateDisplayName, CrateId, Edition, FileId}; | 37 | use base_db::{CrateDisplayName, CrateId, Edition, FileId}; |
38 | use diagnostics::{ | ||
39 | InactiveCode, MacroError, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, | ||
40 | UnresolvedModule, UnresolvedProcMacro, | ||
41 | }; | ||
38 | use either::Either; | 42 | use either::Either; |
39 | use hir_def::{ | 43 | use hir_def::{ |
40 | adt::{ReprKind, VariantData}, | 44 | adt::{ReprKind, VariantData}, |
45 | body::BodyDiagnostic, | ||
41 | expr::{BindingAnnotation, LabelId, Pat, PatId}, | 46 | expr::{BindingAnnotation, LabelId, Pat, PatId}, |
42 | item_tree::ItemTreeNode, | 47 | item_tree::ItemTreeNode, |
43 | lang_item::LangItemTarget, | 48 | lang_item::LangItemTarget, |
49 | nameres, | ||
44 | per_ns::PerNs, | 50 | per_ns::PerNs, |
45 | resolver::{HasResolver, Resolver}, | 51 | resolver::{HasResolver, Resolver}, |
46 | src::HasSource as _, | 52 | src::HasSource as _, |
@@ -50,11 +56,12 @@ use hir_def::{ | |||
50 | LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId, | 56 | LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId, |
51 | TypeParamId, UnionId, | 57 | TypeParamId, UnionId, |
52 | }; | 58 | }; |
53 | use hir_expand::{diagnostics::DiagnosticSink, name::name, MacroDefKind}; | 59 | use hir_expand::{name::name, MacroCallKind, MacroDefKind}; |
54 | use hir_ty::{ | 60 | use hir_ty::{ |
55 | autoderef, | 61 | autoderef, |
56 | consteval::ConstExt, | 62 | consteval::ConstExt, |
57 | could_unify, | 63 | could_unify, |
64 | diagnostics_sink::DiagnosticSink, | ||
58 | method_resolution::{self, def_crates, TyFingerprint}, | 65 | method_resolution::{self, def_crates, TyFingerprint}, |
59 | primitive::UintTy, | 66 | primitive::UintTy, |
60 | subst_prefix, | 67 | subst_prefix, |
@@ -65,11 +72,12 @@ use hir_ty::{ | |||
65 | WhereClause, | 72 | WhereClause, |
66 | }; | 73 | }; |
67 | use itertools::Itertools; | 74 | use itertools::Itertools; |
75 | use nameres::diagnostics::DefDiagnosticKind; | ||
68 | use rustc_hash::FxHashSet; | 76 | use rustc_hash::FxHashSet; |
69 | use stdx::{format_to, impl_from}; | 77 | use stdx::{format_to, impl_from}; |
70 | use syntax::{ | 78 | use syntax::{ |
71 | ast::{self, AttrsOwner, NameOwner}, | 79 | ast::{self, AttrsOwner, NameOwner}, |
72 | AstNode, SmolStr, | 80 | AstNode, AstPtr, SmolStr, SyntaxKind, SyntaxNodePtr, |
73 | }; | 81 | }; |
74 | use tt::{Ident, Leaf, Literal, TokenTree}; | 82 | use tt::{Ident, Leaf, Literal, TokenTree}; |
75 | 83 | ||
@@ -442,7 +450,123 @@ impl Module { | |||
442 | format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string())) | 450 | format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string())) |
443 | }); | 451 | }); |
444 | let def_map = self.id.def_map(db.upcast()); | 452 | let def_map = self.id.def_map(db.upcast()); |
445 | def_map.add_diagnostics(db.upcast(), self.id.local_id, sink); | 453 | for diag in def_map.diagnostics() { |
454 | if diag.in_module != self.id.local_id { | ||
455 | // FIXME: This is accidentally quadratic. | ||
456 | continue; | ||
457 | } | ||
458 | match &diag.kind { | ||
459 | DefDiagnosticKind::UnresolvedModule { ast: declaration, candidate } => { | ||
460 | let decl = declaration.to_node(db.upcast()); | ||
461 | sink.push(UnresolvedModule { | ||
462 | file: declaration.file_id, | ||
463 | decl: AstPtr::new(&decl), | ||
464 | candidate: candidate.clone(), | ||
465 | }) | ||
466 | } | ||
467 | DefDiagnosticKind::UnresolvedExternCrate { ast } => { | ||
468 | let item = ast.to_node(db.upcast()); | ||
469 | sink.push(UnresolvedExternCrate { | ||
470 | file: ast.file_id, | ||
471 | item: AstPtr::new(&item), | ||
472 | }); | ||
473 | } | ||
474 | |||
475 | DefDiagnosticKind::UnresolvedImport { id, index } => { | ||
476 | let file_id = id.file_id(); | ||
477 | let item_tree = id.item_tree(db.upcast()); | ||
478 | let import = &item_tree[id.value]; | ||
479 | |||
480 | let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index); | ||
481 | sink.push(UnresolvedImport { file: file_id, node: AstPtr::new(&use_tree) }); | ||
482 | } | ||
483 | |||
484 | DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { | ||
485 | let item = ast.to_node(db.upcast()); | ||
486 | sink.push(InactiveCode { | ||
487 | file: ast.file_id, | ||
488 | node: AstPtr::new(&item).into(), | ||
489 | cfg: cfg.clone(), | ||
490 | opts: opts.clone(), | ||
491 | }); | ||
492 | } | ||
493 | |||
494 | DefDiagnosticKind::UnresolvedProcMacro { ast } => { | ||
495 | let mut precise_location = None; | ||
496 | let (file, ast, name) = match ast { | ||
497 | MacroCallKind::FnLike { ast_id, .. } => { | ||
498 | let node = ast_id.to_node(db.upcast()); | ||
499 | (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node)), None) | ||
500 | } | ||
501 | MacroCallKind::Derive { ast_id, derive_name, .. } => { | ||
502 | let node = ast_id.to_node(db.upcast()); | ||
503 | |||
504 | // Compute the precise location of the macro name's token in the derive | ||
505 | // list. | ||
506 | // FIXME: This does not handle paths to the macro, but neither does the | ||
507 | // rest of r-a. | ||
508 | let derive_attrs = | ||
509 | node.attrs().filter_map(|attr| match attr.as_simple_call() { | ||
510 | Some((name, args)) if name == "derive" => Some(args), | ||
511 | _ => None, | ||
512 | }); | ||
513 | 'outer: for attr in derive_attrs { | ||
514 | let tokens = | ||
515 | attr.syntax().children_with_tokens().filter_map(|elem| { | ||
516 | match elem { | ||
517 | syntax::NodeOrToken::Node(_) => None, | ||
518 | syntax::NodeOrToken::Token(tok) => Some(tok), | ||
519 | } | ||
520 | }); | ||
521 | for token in tokens { | ||
522 | if token.kind() == SyntaxKind::IDENT | ||
523 | && token.text() == derive_name.as_str() | ||
524 | { | ||
525 | precise_location = Some(token.text_range()); | ||
526 | break 'outer; | ||
527 | } | ||
528 | } | ||
529 | } | ||
530 | |||
531 | ( | ||
532 | ast_id.file_id, | ||
533 | SyntaxNodePtr::from(AstPtr::new(&node)), | ||
534 | Some(derive_name.clone()), | ||
535 | ) | ||
536 | } | ||
537 | }; | ||
538 | sink.push(UnresolvedProcMacro { | ||
539 | file, | ||
540 | node: ast, | ||
541 | precise_location, | ||
542 | macro_name: name, | ||
543 | }); | ||
544 | } | ||
545 | |||
546 | DefDiagnosticKind::UnresolvedMacroCall { ast, path } => { | ||
547 | let node = ast.to_node(db.upcast()); | ||
548 | sink.push(UnresolvedMacroCall { | ||
549 | file: ast.file_id, | ||
550 | node: AstPtr::new(&node), | ||
551 | path: path.clone(), | ||
552 | }); | ||
553 | } | ||
554 | |||
555 | DefDiagnosticKind::MacroError { ast, message } => { | ||
556 | let (file, ast) = match ast { | ||
557 | MacroCallKind::FnLike { ast_id, .. } => { | ||
558 | let node = ast_id.to_node(db.upcast()); | ||
559 | (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node))) | ||
560 | } | ||
561 | MacroCallKind::Derive { ast_id, .. } => { | ||
562 | let node = ast_id.to_node(db.upcast()); | ||
563 | (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node))) | ||
564 | } | ||
565 | }; | ||
566 | sink.push(MacroError { file, node: ast, message: message.clone() }); | ||
567 | } | ||
568 | } | ||
569 | } | ||
446 | for decl in self.declarations(db) { | 570 | for decl in self.declarations(db) { |
447 | match decl { | 571 | match decl { |
448 | crate::ModuleDef::Function(f) => f.diagnostics(db, sink), | 572 | crate::ModuleDef::Function(f) => f.diagnostics(db, sink), |
@@ -551,10 +675,6 @@ impl Struct { | |||
551 | Module { id: self.id.lookup(db.upcast()).container } | 675 | Module { id: self.id.lookup(db.upcast()).container } |
552 | } | 676 | } |
553 | 677 | ||
554 | pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> { | ||
555 | Some(self.module(db).krate()) | ||
556 | } | ||
557 | |||
558 | pub fn name(self, db: &dyn HirDatabase) -> Name { | 678 | pub fn name(self, db: &dyn HirDatabase) -> Name { |
559 | db.struct_data(self.id).name.clone() | 679 | db.struct_data(self.id).name.clone() |
560 | } | 680 | } |
@@ -639,10 +759,6 @@ impl Enum { | |||
639 | Module { id: self.id.lookup(db.upcast()).container } | 759 | Module { id: self.id.lookup(db.upcast()).container } |
640 | } | 760 | } |
641 | 761 | ||
642 | pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> { | ||
643 | Some(self.module(db).krate()) | ||
644 | } | ||
645 | |||
646 | pub fn name(self, db: &dyn HirDatabase) -> Name { | 762 | pub fn name(self, db: &dyn HirDatabase) -> Name { |
647 | db.enum_data(self.id).name.clone() | 763 | db.enum_data(self.id).name.clone() |
648 | } | 764 | } |
@@ -672,6 +788,7 @@ impl Variant { | |||
672 | pub fn module(self, db: &dyn HirDatabase) -> Module { | 788 | pub fn module(self, db: &dyn HirDatabase) -> Module { |
673 | self.parent.module(db) | 789 | self.parent.module(db) |
674 | } | 790 | } |
791 | |||
675 | pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum { | 792 | pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum { |
676 | self.parent | 793 | self.parent |
677 | } | 794 | } |
@@ -728,10 +845,6 @@ impl Adt { | |||
728 | } | 845 | } |
729 | } | 846 | } |
730 | 847 | ||
731 | pub fn krate(self, db: &dyn HirDatabase) -> Crate { | ||
732 | self.module(db).krate() | ||
733 | } | ||
734 | |||
735 | pub fn name(self, db: &dyn HirDatabase) -> Name { | 848 | pub fn name(self, db: &dyn HirDatabase) -> Name { |
736 | match self { | 849 | match self { |
737 | Adt::Struct(s) => s.name(db), | 850 | Adt::Struct(s) => s.name(db), |
@@ -820,10 +933,6 @@ impl Function { | |||
820 | self.id.lookup(db.upcast()).module(db.upcast()).into() | 933 | self.id.lookup(db.upcast()).module(db.upcast()).into() |
821 | } | 934 | } |
822 | 935 | ||
823 | pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> { | ||
824 | Some(self.module(db).krate()) | ||
825 | } | ||
826 | |||
827 | pub fn name(self, db: &dyn HirDatabase) -> Name { | 936 | pub fn name(self, db: &dyn HirDatabase) -> Name { |
828 | db.function_data(self.id).name.clone() | 937 | db.function_data(self.id).name.clone() |
829 | } | 938 | } |
@@ -880,7 +989,37 @@ impl Function { | |||
880 | 989 | ||
881 | pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { | 990 | pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { |
882 | let krate = self.module(db).id.krate(); | 991 | let krate = self.module(db).id.krate(); |
883 | hir_def::diagnostics::validate_body(db.upcast(), self.id.into(), sink); | 992 | |
993 | let source_map = db.body_with_source_map(self.id.into()).1; | ||
994 | for diag in source_map.diagnostics() { | ||
995 | match diag { | ||
996 | BodyDiagnostic::InactiveCode { node, cfg, opts } => sink.push(InactiveCode { | ||
997 | file: node.file_id, | ||
998 | node: node.value.clone(), | ||
999 | cfg: cfg.clone(), | ||
1000 | opts: opts.clone(), | ||
1001 | }), | ||
1002 | BodyDiagnostic::MacroError { node, message } => sink.push(MacroError { | ||
1003 | file: node.file_id, | ||
1004 | node: node.value.clone().into(), | ||
1005 | message: message.to_string(), | ||
1006 | }), | ||
1007 | BodyDiagnostic::UnresolvedProcMacro { node } => sink.push(UnresolvedProcMacro { | ||
1008 | file: node.file_id, | ||
1009 | node: node.value.clone().into(), | ||
1010 | precise_location: None, | ||
1011 | macro_name: None, | ||
1012 | }), | ||
1013 | BodyDiagnostic::UnresolvedMacroCall { node, path } => { | ||
1014 | sink.push(UnresolvedMacroCall { | ||
1015 | file: node.file_id, | ||
1016 | node: node.value.clone(), | ||
1017 | path: path.clone(), | ||
1018 | }) | ||
1019 | } | ||
1020 | } | ||
1021 | } | ||
1022 | |||
884 | hir_ty::diagnostics::validate_module_item(db, krate, self.id.into(), sink); | 1023 | hir_ty::diagnostics::validate_module_item(db, krate, self.id.into(), sink); |
885 | hir_ty::diagnostics::validate_body(db, self.id.into(), sink); | 1024 | hir_ty::diagnostics::validate_body(db, self.id.into(), sink); |
886 | } | 1025 | } |
@@ -1013,10 +1152,6 @@ impl Const { | |||
1013 | Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } | 1152 | Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } |
1014 | } | 1153 | } |
1015 | 1154 | ||
1016 | pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> { | ||
1017 | Some(self.module(db).krate()) | ||
1018 | } | ||
1019 | |||
1020 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { | 1155 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { |
1021 | db.const_data(self.id).name.clone() | 1156 | db.const_data(self.id).name.clone() |
1022 | } | 1157 | } |
@@ -1044,10 +1179,6 @@ impl Static { | |||
1044 | Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } | 1179 | Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } |
1045 | } | 1180 | } |
1046 | 1181 | ||
1047 | pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> { | ||
1048 | Some(self.module(db).krate()) | ||
1049 | } | ||
1050 | |||
1051 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { | 1182 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { |
1052 | db.static_data(self.id).name.clone() | 1183 | db.static_data(self.id).name.clone() |
1053 | } | 1184 | } |
@@ -1111,10 +1242,6 @@ impl TypeAlias { | |||
1111 | Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } | 1242 | Module { id: self.id.lookup(db.upcast()).module(db.upcast()) } |
1112 | } | 1243 | } |
1113 | 1244 | ||
1114 | pub fn krate(self, db: &dyn HirDatabase) -> Crate { | ||
1115 | self.module(db).krate() | ||
1116 | } | ||
1117 | |||
1118 | pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> { | 1245 | pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> { |
1119 | db.type_alias_data(self.id).type_ref.as_deref().cloned() | 1246 | db.type_alias_data(self.id).type_ref.as_deref().cloned() |
1120 | } | 1247 | } |
@@ -1155,10 +1282,16 @@ impl BuiltinType { | |||
1155 | 1282 | ||
1156 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 1283 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
1157 | pub enum MacroKind { | 1284 | pub enum MacroKind { |
1285 | /// `macro_rules!` or Macros 2.0 macro. | ||
1158 | Declarative, | 1286 | Declarative, |
1159 | ProcMacro, | 1287 | /// A built-in or custom derive. |
1160 | Derive, | 1288 | Derive, |
1289 | /// A built-in function-like macro. | ||
1161 | BuiltIn, | 1290 | BuiltIn, |
1291 | /// A procedural attribute macro. | ||
1292 | Attr, | ||
1293 | /// A function-like procedural macro. | ||
1294 | ProcMacro, | ||
1162 | } | 1295 | } |
1163 | 1296 | ||
1164 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 1297 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
@@ -1188,11 +1321,13 @@ impl MacroDef { | |||
1188 | pub fn kind(&self) -> MacroKind { | 1321 | pub fn kind(&self) -> MacroKind { |
1189 | match self.id.kind { | 1322 | match self.id.kind { |
1190 | MacroDefKind::Declarative(_) => MacroKind::Declarative, | 1323 | MacroDefKind::Declarative(_) => MacroKind::Declarative, |
1191 | MacroDefKind::BuiltIn(_, _) => MacroKind::BuiltIn, | 1324 | MacroDefKind::BuiltIn(_, _) | MacroDefKind::BuiltInEager(_, _) => MacroKind::BuiltIn, |
1192 | MacroDefKind::BuiltInDerive(_, _) => MacroKind::Derive, | 1325 | MacroDefKind::BuiltInDerive(_, _) => MacroKind::Derive, |
1193 | MacroDefKind::BuiltInEager(_, _) => MacroKind::BuiltIn, | 1326 | MacroDefKind::ProcMacro(_, base_db::ProcMacroKind::CustomDerive, _) => { |
1194 | // FIXME might be a derive | 1327 | MacroKind::Derive |
1195 | MacroDefKind::ProcMacro(_, _) => MacroKind::ProcMacro, | 1328 | } |
1329 | MacroDefKind::ProcMacro(_, base_db::ProcMacroKind::Attr, _) => MacroKind::Attr, | ||
1330 | MacroDefKind::ProcMacro(_, base_db::ProcMacroKind::FuncLike, _) => MacroKind::ProcMacro, | ||
1196 | } | 1331 | } |
1197 | } | 1332 | } |
1198 | } | 1333 | } |
@@ -1666,10 +1801,6 @@ impl Impl { | |||
1666 | self.id.lookup(db.upcast()).container.into() | 1801 | self.id.lookup(db.upcast()).container.into() |
1667 | } | 1802 | } |
1668 | 1803 | ||
1669 | pub fn krate(self, db: &dyn HirDatabase) -> Crate { | ||
1670 | Crate { id: self.module(db).id.krate() } | ||
1671 | } | ||
1672 | |||
1673 | pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> { | 1804 | pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> { |
1674 | let src = self.source(db)?; | 1805 | let src = self.source(db)?; |
1675 | let item = src.file_id.is_builtin_derive(db.upcast())?; | 1806 | let item = src.file_id.is_builtin_derive(db.upcast())?; |
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 3aa467e3c..8d3c43d08 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -120,10 +120,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
120 | pub fn speculative_expand( | 120 | pub fn speculative_expand( |
121 | &self, | 121 | &self, |
122 | actual_macro_call: &ast::MacroCall, | 122 | actual_macro_call: &ast::MacroCall, |
123 | hypothetical_args: &ast::TokenTree, | 123 | speculative_args: &ast::TokenTree, |
124 | token_to_map: SyntaxToken, | 124 | token_to_map: SyntaxToken, |
125 | ) -> Option<(SyntaxNode, SyntaxToken)> { | 125 | ) -> Option<(SyntaxNode, SyntaxToken)> { |
126 | self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map) | 126 | self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map) |
127 | } | 127 | } |
128 | 128 | ||
129 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | 129 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { |
@@ -335,7 +335,7 @@ impl<'db> SemanticsImpl<'db> { | |||
335 | fn speculative_expand( | 335 | fn speculative_expand( |
336 | &self, | 336 | &self, |
337 | actual_macro_call: &ast::MacroCall, | 337 | actual_macro_call: &ast::MacroCall, |
338 | hypothetical_args: &ast::TokenTree, | 338 | speculative_args: &ast::TokenTree, |
339 | token_to_map: SyntaxToken, | 339 | token_to_map: SyntaxToken, |
340 | ) -> Option<(SyntaxNode, SyntaxToken)> { | 340 | ) -> Option<(SyntaxNode, SyntaxToken)> { |
341 | let sa = self.analyze(actual_macro_call.syntax()); | 341 | let sa = self.analyze(actual_macro_call.syntax()); |
@@ -344,10 +344,10 @@ impl<'db> SemanticsImpl<'db> { | |||
344 | let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { | 344 | let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { |
345 | sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) | 345 | sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) |
346 | })?; | 346 | })?; |
347 | hir_expand::db::expand_hypothetical( | 347 | hir_expand::db::expand_speculative( |
348 | self.db.upcast(), | 348 | self.db.upcast(), |
349 | macro_call_id, | 349 | macro_call_id, |
350 | hypothetical_args, | 350 | speculative_args, |
351 | token_to_map, | 351 | token_to_map, |
352 | ) | 352 | ) |
353 | } | 353 | } |
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 3f940124c..37a050415 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs | |||
@@ -308,7 +308,11 @@ impl SourceAnalyzer { | |||
308 | } | 308 | } |
309 | } | 309 | } |
310 | 310 | ||
311 | resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns) | 311 | if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) { |
312 | resolve_hir_path_qualifier(db, &self.resolver, &hir_path) | ||
313 | } else { | ||
314 | resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns) | ||
315 | } | ||
312 | } | 316 | } |
313 | 317 | ||
314 | pub(crate) fn record_literal_missing_fields( | 318 | pub(crate) fn record_literal_missing_fields( |
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs index 89a1ea770..385ba8c80 100644 --- a/crates/hir_def/src/attr.rs +++ b/crates/hir_def/src/attr.rs | |||
@@ -105,7 +105,7 @@ impl RawAttrs { | |||
105 | Either::Left(attr) => Attr::from_src(db, attr, hygiene, id), | 105 | Either::Left(attr) => Attr::from_src(db, attr, hygiene, id), |
106 | Either::Right(comment) => comment.doc_comment().map(|doc| Attr { | 106 | Either::Right(comment) => comment.doc_comment().map(|doc| Attr { |
107 | id, | 107 | id, |
108 | input: Some(AttrInput::Literal(SmolStr::new(doc))), | 108 | input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), |
109 | path: Interned::new(ModPath::from(hir_expand::name!(doc))), | 109 | path: Interned::new(ModPath::from(hir_expand::name!(doc))), |
110 | }), | 110 | }), |
111 | }) | 111 | }) |
@@ -151,7 +151,7 @@ impl RawAttrs { | |||
151 | return smallvec![attr.clone()]; | 151 | return smallvec![attr.clone()]; |
152 | } | 152 | } |
153 | 153 | ||
154 | let subtree = match &attr.input { | 154 | let subtree = match attr.input.as_deref() { |
155 | Some(AttrInput::TokenTree(it)) => it, | 155 | Some(AttrInput::TokenTree(it)) => it, |
156 | _ => return smallvec![attr.clone()], | 156 | _ => return smallvec![attr.clone()], |
157 | }; | 157 | }; |
@@ -251,7 +251,7 @@ impl Attrs { | |||
251 | } | 251 | } |
252 | 252 | ||
253 | pub fn docs(&self) -> Option<Documentation> { | 253 | pub fn docs(&self) -> Option<Documentation> { |
254 | let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_ref()? { | 254 | let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? { |
255 | AttrInput::Literal(s) => Some(s), | 255 | AttrInput::Literal(s) => Some(s), |
256 | AttrInput::TokenTree(_) => None, | 256 | AttrInput::TokenTree(_) => None, |
257 | }); | 257 | }); |
@@ -454,7 +454,7 @@ impl AttrsWithOwner { | |||
454 | db: &dyn DefDatabase, | 454 | db: &dyn DefDatabase, |
455 | ) -> Option<(Documentation, DocsRangeMap)> { | 455 | ) -> Option<(Documentation, DocsRangeMap)> { |
456 | // FIXME: code duplication in `docs` above | 456 | // FIXME: code duplication in `docs` above |
457 | let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_ref()? { | 457 | let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? { |
458 | AttrInput::Literal(s) => Some((s, attr.id)), | 458 | AttrInput::Literal(s) => Some((s, attr.id)), |
459 | AttrInput::TokenTree(_) => None, | 459 | AttrInput::TokenTree(_) => None, |
460 | }); | 460 | }); |
@@ -637,10 +637,10 @@ pub(crate) struct AttrId { | |||
637 | pub struct Attr { | 637 | pub struct Attr { |
638 | pub(crate) id: AttrId, | 638 | pub(crate) id: AttrId, |
639 | pub(crate) path: Interned<ModPath>, | 639 | pub(crate) path: Interned<ModPath>, |
640 | pub(crate) input: Option<AttrInput>, | 640 | pub(crate) input: Option<Interned<AttrInput>>, |
641 | } | 641 | } |
642 | 642 | ||
643 | #[derive(Debug, Clone, PartialEq, Eq)] | 643 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
644 | pub enum AttrInput { | 644 | pub enum AttrInput { |
645 | /// `#[attr = "string"]` | 645 | /// `#[attr = "string"]` |
646 | Literal(SmolStr), | 646 | Literal(SmolStr), |
@@ -670,9 +670,9 @@ impl Attr { | |||
670 | ast::LiteralKind::String(string) => string.value()?.into(), | 670 | ast::LiteralKind::String(string) => string.value()?.into(), |
671 | _ => lit.syntax().first_token()?.text().trim_matches('"').into(), | 671 | _ => lit.syntax().first_token()?.text().trim_matches('"').into(), |
672 | }; | 672 | }; |
673 | Some(AttrInput::Literal(value)) | 673 | Some(Interned::new(AttrInput::Literal(value))) |
674 | } else if let Some(tt) = ast.token_tree() { | 674 | } else if let Some(tt) = ast.token_tree() { |
675 | Some(AttrInput::TokenTree(ast_to_token_tree(&tt).0)) | 675 | Some(Interned::new(AttrInput::TokenTree(ast_to_token_tree(&tt).0))) |
676 | } else { | 676 | } else { |
677 | None | 677 | None |
678 | }; | 678 | }; |
@@ -688,7 +688,7 @@ impl Attr { | |||
688 | return None; | 688 | return None; |
689 | } | 689 | } |
690 | 690 | ||
691 | match &self.input { | 691 | match self.input.as_deref() { |
692 | Some(AttrInput::TokenTree(args)) => { | 692 | Some(AttrInput::TokenTree(args)) => { |
693 | let mut counter = 0; | 693 | let mut counter = 0; |
694 | let paths = args | 694 | let paths = args |
@@ -720,7 +720,7 @@ impl Attr { | |||
720 | } | 720 | } |
721 | 721 | ||
722 | pub fn string_value(&self) -> Option<&SmolStr> { | 722 | pub fn string_value(&self) -> Option<&SmolStr> { |
723 | match self.input.as_ref()? { | 723 | match self.input.as_deref()? { |
724 | AttrInput::Literal(it) => Some(it), | 724 | AttrInput::Literal(it) => Some(it), |
725 | _ => None, | 725 | _ => None, |
726 | } | 726 | } |
@@ -735,14 +735,14 @@ pub struct AttrQuery<'a> { | |||
735 | 735 | ||
736 | impl<'a> AttrQuery<'a> { | 736 | impl<'a> AttrQuery<'a> { |
737 | pub fn tt_values(self) -> impl Iterator<Item = &'a Subtree> { | 737 | pub fn tt_values(self) -> impl Iterator<Item = &'a Subtree> { |
738 | self.attrs().filter_map(|attr| match attr.input.as_ref()? { | 738 | self.attrs().filter_map(|attr| match attr.input.as_deref()? { |
739 | AttrInput::TokenTree(it) => Some(it), | 739 | AttrInput::TokenTree(it) => Some(it), |
740 | _ => None, | 740 | _ => None, |
741 | }) | 741 | }) |
742 | } | 742 | } |
743 | 743 | ||
744 | pub fn string_value(self) -> Option<&'a SmolStr> { | 744 | pub fn string_value(self) -> Option<&'a SmolStr> { |
745 | self.attrs().find_map(|attr| match attr.input.as_ref()? { | 745 | self.attrs().find_map(|attr| match attr.input.as_deref()? { |
746 | AttrInput::Literal(it) => Some(it), | 746 | AttrInput::Literal(it) => Some(it), |
747 | _ => None, | 747 | _ => None, |
748 | }) | 748 | }) |
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs index 98b485b60..c521879c8 100644 --- a/crates/hir_def/src/body.rs +++ b/crates/hir_def/src/body.rs | |||
@@ -1,7 +1,6 @@ | |||
1 | //! Defines `Body`: a lowered representation of bodies of functions, statics and | 1 | //! Defines `Body`: a lowered representation of bodies of functions, statics and |
2 | //! consts. | 2 | //! consts. |
3 | mod lower; | 3 | mod lower; |
4 | mod diagnostics; | ||
5 | #[cfg(test)] | 4 | #[cfg(test)] |
6 | mod tests; | 5 | mod tests; |
7 | pub mod scope; | 6 | pub mod scope; |
@@ -9,17 +8,16 @@ pub mod scope; | |||
9 | use std::{mem, ops::Index, sync::Arc}; | 8 | use std::{mem, ops::Index, sync::Arc}; |
10 | 9 | ||
11 | use base_db::CrateId; | 10 | use base_db::CrateId; |
12 | use cfg::CfgOptions; | 11 | use cfg::{CfgExpr, CfgOptions}; |
13 | use drop_bomb::DropBomb; | 12 | use drop_bomb::DropBomb; |
14 | use either::Either; | 13 | use either::Either; |
15 | use hir_expand::{ | 14 | use hir_expand::{ |
16 | ast_id_map::AstIdMap, diagnostics::DiagnosticSink, hygiene::Hygiene, AstId, ExpandResult, | 15 | ast_id_map::AstIdMap, hygiene::Hygiene, AstId, ExpandResult, HirFileId, InFile, MacroDefId, |
17 | HirFileId, InFile, MacroDefId, | ||
18 | }; | 16 | }; |
19 | use la_arena::{Arena, ArenaMap}; | 17 | use la_arena::{Arena, ArenaMap}; |
20 | use profile::Count; | 18 | use profile::Count; |
21 | use rustc_hash::FxHashMap; | 19 | use rustc_hash::FxHashMap; |
22 | use syntax::{ast, AstNode, AstPtr}; | 20 | use syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; |
23 | 21 | ||
24 | use crate::{ | 22 | use crate::{ |
25 | attr::{Attrs, RawAttrs}, | 23 | attr::{Attrs, RawAttrs}, |
@@ -273,12 +271,20 @@ pub struct BodySourceMap { | |||
273 | 271 | ||
274 | /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in | 272 | /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in |
275 | /// the source map (since they're just as volatile). | 273 | /// the source map (since they're just as volatile). |
276 | diagnostics: Vec<diagnostics::BodyDiagnostic>, | 274 | diagnostics: Vec<BodyDiagnostic>, |
277 | } | 275 | } |
278 | 276 | ||
279 | #[derive(Default, Debug, Eq, PartialEq, Clone, Copy)] | 277 | #[derive(Default, Debug, Eq, PartialEq, Clone, Copy)] |
280 | pub struct SyntheticSyntax; | 278 | pub struct SyntheticSyntax; |
281 | 279 | ||
280 | #[derive(Debug, Eq, PartialEq)] | ||
281 | pub enum BodyDiagnostic { | ||
282 | InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, | ||
283 | MacroError { node: InFile<AstPtr<ast::MacroCall>>, message: String }, | ||
284 | UnresolvedProcMacro { node: InFile<AstPtr<ast::MacroCall>> }, | ||
285 | UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath }, | ||
286 | } | ||
287 | |||
282 | impl Body { | 288 | impl Body { |
283 | pub(crate) fn body_with_source_map_query( | 289 | pub(crate) fn body_with_source_map_query( |
284 | db: &dyn DefDatabase, | 290 | db: &dyn DefDatabase, |
@@ -416,9 +422,8 @@ impl BodySourceMap { | |||
416 | self.field_map.get(&src).cloned() | 422 | self.field_map.get(&src).cloned() |
417 | } | 423 | } |
418 | 424 | ||
419 | pub(crate) fn add_diagnostics(&self, _db: &dyn DefDatabase, sink: &mut DiagnosticSink<'_>) { | 425 | /// Get a reference to the body source map's diagnostics. |
420 | for diag in &self.diagnostics { | 426 | pub fn diagnostics(&self) -> &[BodyDiagnostic] { |
421 | diag.add_to(sink); | 427 | &self.diagnostics |
422 | } | ||
423 | } | 428 | } |
424 | } | 429 | } |
diff --git a/crates/hir_def/src/body/diagnostics.rs b/crates/hir_def/src/body/diagnostics.rs deleted file mode 100644 index f6992c9a8..000000000 --- a/crates/hir_def/src/body/diagnostics.rs +++ /dev/null | |||
@@ -1,32 +0,0 @@ | |||
1 | //! Diagnostics emitted during body lowering. | ||
2 | |||
3 | use hir_expand::diagnostics::DiagnosticSink; | ||
4 | |||
5 | use crate::diagnostics::{InactiveCode, MacroError, UnresolvedMacroCall, UnresolvedProcMacro}; | ||
6 | |||
7 | #[derive(Debug, Eq, PartialEq)] | ||
8 | pub(crate) enum BodyDiagnostic { | ||
9 | InactiveCode(InactiveCode), | ||
10 | MacroError(MacroError), | ||
11 | UnresolvedProcMacro(UnresolvedProcMacro), | ||
12 | UnresolvedMacroCall(UnresolvedMacroCall), | ||
13 | } | ||
14 | |||
15 | impl BodyDiagnostic { | ||
16 | pub(crate) fn add_to(&self, sink: &mut DiagnosticSink<'_>) { | ||
17 | match self { | ||
18 | BodyDiagnostic::InactiveCode(diag) => { | ||
19 | sink.push(diag.clone()); | ||
20 | } | ||
21 | BodyDiagnostic::MacroError(diag) => { | ||
22 | sink.push(diag.clone()); | ||
23 | } | ||
24 | BodyDiagnostic::UnresolvedProcMacro(diag) => { | ||
25 | sink.push(diag.clone()); | ||
26 | } | ||
27 | BodyDiagnostic::UnresolvedMacroCall(diag) => { | ||
28 | sink.push(diag.clone()); | ||
29 | } | ||
30 | } | ||
31 | } | ||
32 | } | ||
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs index 2a7e0205f..da1fdac33 100644 --- a/crates/hir_def/src/body/lower.rs +++ b/crates/hir_def/src/body/lower.rs | |||
@@ -8,7 +8,7 @@ use hir_expand::{ | |||
8 | ast_id_map::{AstIdMap, FileAstId}, | 8 | ast_id_map::{AstIdMap, FileAstId}, |
9 | hygiene::Hygiene, | 9 | hygiene::Hygiene, |
10 | name::{name, AsName, Name}, | 10 | name::{name, AsName, Name}, |
11 | ExpandError, HirFileId, | 11 | ExpandError, HirFileId, InFile, |
12 | }; | 12 | }; |
13 | use la_arena::Arena; | 13 | use la_arena::Arena; |
14 | use profile::Count; | 14 | use profile::Count; |
@@ -23,9 +23,9 @@ use syntax::{ | |||
23 | use crate::{ | 23 | use crate::{ |
24 | adt::StructKind, | 24 | adt::StructKind, |
25 | body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax}, | 25 | body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax}, |
26 | body::{BodyDiagnostic, ExprSource, PatSource}, | ||
26 | builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, | 27 | builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, |
27 | db::DefDatabase, | 28 | db::DefDatabase, |
28 | diagnostics::{InactiveCode, MacroError, UnresolvedMacroCall, UnresolvedProcMacro}, | ||
29 | expr::{ | 29 | expr::{ |
30 | dummy_expr_id, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Label, | 30 | dummy_expr_id, ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Label, |
31 | LabelId, Literal, LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, | 31 | LabelId, Literal, LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, |
@@ -38,8 +38,6 @@ use crate::{ | |||
38 | AdtId, BlockLoc, ModuleDefId, UnresolvedMacro, | 38 | AdtId, BlockLoc, ModuleDefId, UnresolvedMacro, |
39 | }; | 39 | }; |
40 | 40 | ||
41 | use super::{diagnostics::BodyDiagnostic, ExprSource, PatSource}; | ||
42 | |||
43 | pub struct LowerCtx<'a> { | 41 | pub struct LowerCtx<'a> { |
44 | pub db: &'a dyn DefDatabase, | 42 | pub db: &'a dyn DefDatabase, |
45 | hygiene: Hygiene, | 43 | hygiene: Hygiene, |
@@ -592,13 +590,10 @@ impl ExprCollector<'_> { | |||
592 | let res = match res { | 590 | let res = match res { |
593 | Ok(res) => res, | 591 | Ok(res) => res, |
594 | Err(UnresolvedMacro { path }) => { | 592 | Err(UnresolvedMacro { path }) => { |
595 | self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall( | 593 | self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall { |
596 | UnresolvedMacroCall { | 594 | node: InFile::new(outer_file, syntax_ptr), |
597 | file: outer_file, | 595 | path, |
598 | node: syntax_ptr.cast().unwrap(), | 596 | }); |
599 | path, | ||
600 | }, | ||
601 | )); | ||
602 | collector(self, None); | 597 | collector(self, None); |
603 | return; | 598 | return; |
604 | } | 599 | } |
@@ -606,21 +601,15 @@ impl ExprCollector<'_> { | |||
606 | 601 | ||
607 | match &res.err { | 602 | match &res.err { |
608 | Some(ExpandError::UnresolvedProcMacro) => { | 603 | Some(ExpandError::UnresolvedProcMacro) => { |
609 | self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro( | 604 | self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro { |
610 | UnresolvedProcMacro { | 605 | node: InFile::new(outer_file, syntax_ptr), |
611 | file: outer_file, | 606 | }); |
612 | node: syntax_ptr.into(), | ||
613 | precise_location: None, | ||
614 | macro_name: None, | ||
615 | }, | ||
616 | )); | ||
617 | } | 607 | } |
618 | Some(err) => { | 608 | Some(err) => { |
619 | self.source_map.diagnostics.push(BodyDiagnostic::MacroError(MacroError { | 609 | self.source_map.diagnostics.push(BodyDiagnostic::MacroError { |
620 | file: outer_file, | 610 | node: InFile::new(outer_file, syntax_ptr), |
621 | node: syntax_ptr.into(), | ||
622 | message: err.to_string(), | 611 | message: err.to_string(), |
623 | })); | 612 | }); |
624 | } | 613 | } |
625 | None => {} | 614 | None => {} |
626 | } | 615 | } |
@@ -945,12 +934,14 @@ impl ExprCollector<'_> { | |||
945 | return Some(()); | 934 | return Some(()); |
946 | } | 935 | } |
947 | 936 | ||
948 | self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode(InactiveCode { | 937 | self.source_map.diagnostics.push(BodyDiagnostic::InactiveCode { |
949 | file: self.expander.current_file_id, | 938 | node: InFile::new( |
950 | node: SyntaxNodePtr::new(owner.syntax()), | 939 | self.expander.current_file_id, |
940 | SyntaxNodePtr::new(owner.syntax()), | ||
941 | ), | ||
951 | cfg, | 942 | cfg, |
952 | opts: self.expander.cfg_options().clone(), | 943 | opts: self.expander.cfg_options().clone(), |
953 | })); | 944 | }); |
954 | 945 | ||
955 | None | 946 | None |
956 | } | 947 | } |
diff --git a/crates/hir_def/src/body/tests.rs b/crates/hir_def/src/body/tests.rs index 3e8f16306..d4fae05a6 100644 --- a/crates/hir_def/src/body/tests.rs +++ b/crates/hir_def/src/body/tests.rs | |||
@@ -96,26 +96,26 @@ fn f() { | |||
96 | // The three g̶e̶n̶d̶e̶r̶s̶ statements: | 96 | // The three g̶e̶n̶d̶e̶r̶s̶ statements: |
97 | 97 | ||
98 | #[cfg(a)] fn f() {} // Item statement | 98 | #[cfg(a)] fn f() {} // Item statement |
99 | //^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 99 | //^^^^^^^^^^^^^^^^^^^ InactiveCode |
100 | #[cfg(a)] {} // Expression statement | 100 | #[cfg(a)] {} // Expression statement |
101 | //^^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 101 | //^^^^^^^^^^^^ InactiveCode |
102 | #[cfg(a)] let x = 0; // let statement | 102 | #[cfg(a)] let x = 0; // let statement |
103 | //^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 103 | //^^^^^^^^^^^^^^^^^^^^ InactiveCode |
104 | 104 | ||
105 | abc(#[cfg(a)] 0); | 105 | abc(#[cfg(a)] 0); |
106 | //^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 106 | //^^^^^^^^^^^ InactiveCode |
107 | let x = Struct { | 107 | let x = Struct { |
108 | #[cfg(a)] f: 0, | 108 | #[cfg(a)] f: 0, |
109 | //^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 109 | //^^^^^^^^^^^^^^ InactiveCode |
110 | }; | 110 | }; |
111 | match () { | 111 | match () { |
112 | () => (), | 112 | () => (), |
113 | #[cfg(a)] () => (), | 113 | #[cfg(a)] () => (), |
114 | //^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 114 | //^^^^^^^^^^^^^^^^^^ InactiveCode |
115 | } | 115 | } |
116 | 116 | ||
117 | #[cfg(a)] 0 // Trailing expression of block | 117 | #[cfg(a)] 0 // Trailing expression of block |
118 | //^^^^^^^^^^^ code is inactive due to #[cfg] directives: a is disabled | 118 | //^^^^^^^^^^^ InactiveCode |
119 | } | 119 | } |
120 | ", | 120 | ", |
121 | ); | 121 | ); |
@@ -188,7 +188,7 @@ fn unresolved_macro_diag() { | |||
188 | r#" | 188 | r#" |
189 | fn f() { | 189 | fn f() { |
190 | m!(); | 190 | m!(); |
191 | //^^^^ unresolved macro `m!` | 191 | //^^^^ UnresolvedMacroCall |
192 | } | 192 | } |
193 | "#, | 193 | "#, |
194 | ); | 194 | ); |
diff --git a/crates/hir_def/src/data.rs b/crates/hir_def/src/data.rs index 135a6698e..d2bb381be 100644 --- a/crates/hir_def/src/data.rs +++ b/crates/hir_def/src/data.rs | |||
@@ -22,6 +22,7 @@ pub struct FunctionData { | |||
22 | pub name: Name, | 22 | pub name: Name, |
23 | pub params: Vec<Interned<TypeRef>>, | 23 | pub params: Vec<Interned<TypeRef>>, |
24 | pub ret_type: Interned<TypeRef>, | 24 | pub ret_type: Interned<TypeRef>, |
25 | pub async_ret_type: Option<Interned<TypeRef>>, | ||
25 | pub attrs: Attrs, | 26 | pub attrs: Attrs, |
26 | pub visibility: RawVisibility, | 27 | pub visibility: RawVisibility, |
27 | pub abi: Option<Interned<str>>, | 28 | pub abi: Option<Interned<str>>, |
@@ -63,6 +64,7 @@ impl FunctionData { | |||
63 | }) | 64 | }) |
64 | .collect(), | 65 | .collect(), |
65 | ret_type: func.ret_type.clone(), | 66 | ret_type: func.ret_type.clone(), |
67 | async_ret_type: func.async_ret_type.clone(), | ||
66 | attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()), | 68 | attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()), |
67 | visibility: item_tree[func.visibility].clone(), | 69 | visibility: item_tree[func.visibility].clone(), |
68 | abi: func.abi.clone(), | 70 | abi: func.abi.clone(), |
@@ -112,7 +114,7 @@ pub struct TypeAliasData { | |||
112 | pub visibility: RawVisibility, | 114 | pub visibility: RawVisibility, |
113 | pub is_extern: bool, | 115 | pub is_extern: bool, |
114 | /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). | 116 | /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). |
115 | pub bounds: Vec<TypeBound>, | 117 | pub bounds: Vec<Interned<TypeBound>>, |
116 | } | 118 | } |
117 | 119 | ||
118 | impl TypeAliasData { | 120 | impl TypeAliasData { |
@@ -141,7 +143,6 @@ pub struct TraitData { | |||
141 | pub is_auto: bool, | 143 | pub is_auto: bool, |
142 | pub is_unsafe: bool, | 144 | pub is_unsafe: bool, |
143 | pub visibility: RawVisibility, | 145 | pub visibility: RawVisibility, |
144 | pub bounds: Box<[TypeBound]>, | ||
145 | } | 146 | } |
146 | 147 | ||
147 | impl TraitData { | 148 | impl TraitData { |
@@ -155,7 +156,6 @@ impl TraitData { | |||
155 | let module_id = tr_loc.container; | 156 | let module_id = tr_loc.container; |
156 | let container = AssocContainerId::TraitId(tr); | 157 | let container = AssocContainerId::TraitId(tr); |
157 | let visibility = item_tree[tr_def.visibility].clone(); | 158 | let visibility = item_tree[tr_def.visibility].clone(); |
158 | let bounds = tr_def.bounds.clone(); | ||
159 | let mut expander = Expander::new(db, tr_loc.id.file_id(), module_id); | 159 | let mut expander = Expander::new(db, tr_loc.id.file_id(), module_id); |
160 | 160 | ||
161 | let items = collect_items( | 161 | let items = collect_items( |
@@ -168,7 +168,7 @@ impl TraitData { | |||
168 | 100, | 168 | 100, |
169 | ); | 169 | ); |
170 | 170 | ||
171 | Arc::new(TraitData { name, items, is_auto, is_unsafe, visibility, bounds }) | 171 | Arc::new(TraitData { name, items, is_auto, is_unsafe, visibility }) |
172 | } | 172 | } |
173 | 173 | ||
174 | pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ { | 174 | pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ { |
diff --git a/crates/hir_def/src/diagnostics.rs b/crates/hir_def/src/diagnostics.rs deleted file mode 100644 index a71ae2668..000000000 --- a/crates/hir_def/src/diagnostics.rs +++ /dev/null | |||
@@ -1,227 +0,0 @@ | |||
1 | //! Diagnostics produced by `hir_def`. | ||
2 | |||
3 | use std::any::Any; | ||
4 | use stdx::format_to; | ||
5 | |||
6 | use cfg::{CfgExpr, CfgOptions, DnfExpr}; | ||
7 | use hir_expand::diagnostics::{Diagnostic, DiagnosticCode, DiagnosticSink}; | ||
8 | use hir_expand::{HirFileId, InFile}; | ||
9 | use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; | ||
10 | |||
11 | use crate::{db::DefDatabase, path::ModPath, DefWithBodyId}; | ||
12 | |||
13 | pub fn validate_body(db: &dyn DefDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) { | ||
14 | let source_map = db.body_with_source_map(owner).1; | ||
15 | source_map.add_diagnostics(db, sink); | ||
16 | } | ||
17 | |||
18 | // Diagnostic: unresolved-module | ||
19 | // | ||
20 | // This diagnostic is triggered if rust-analyzer is unable to discover referred module. | ||
21 | #[derive(Debug)] | ||
22 | pub struct UnresolvedModule { | ||
23 | pub file: HirFileId, | ||
24 | pub decl: AstPtr<ast::Module>, | ||
25 | pub candidate: String, | ||
26 | } | ||
27 | |||
28 | impl Diagnostic for UnresolvedModule { | ||
29 | fn code(&self) -> DiagnosticCode { | ||
30 | DiagnosticCode("unresolved-module") | ||
31 | } | ||
32 | fn message(&self) -> String { | ||
33 | "unresolved module".to_string() | ||
34 | } | ||
35 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
36 | InFile::new(self.file, self.decl.clone().into()) | ||
37 | } | ||
38 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
39 | self | ||
40 | } | ||
41 | } | ||
42 | |||
43 | // Diagnostic: unresolved-extern-crate | ||
44 | // | ||
45 | // This diagnostic is triggered if rust-analyzer is unable to discover referred extern crate. | ||
46 | #[derive(Debug)] | ||
47 | pub struct UnresolvedExternCrate { | ||
48 | pub file: HirFileId, | ||
49 | pub item: AstPtr<ast::ExternCrate>, | ||
50 | } | ||
51 | |||
52 | impl Diagnostic for UnresolvedExternCrate { | ||
53 | fn code(&self) -> DiagnosticCode { | ||
54 | DiagnosticCode("unresolved-extern-crate") | ||
55 | } | ||
56 | fn message(&self) -> String { | ||
57 | "unresolved extern crate".to_string() | ||
58 | } | ||
59 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
60 | InFile::new(self.file, self.item.clone().into()) | ||
61 | } | ||
62 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
63 | self | ||
64 | } | ||
65 | } | ||
66 | |||
67 | // Diagnostic: unresolved-import | ||
68 | // | ||
69 | // This diagnostic is triggered if rust-analyzer is unable to discover imported module. | ||
70 | #[derive(Debug)] | ||
71 | pub struct UnresolvedImport { | ||
72 | pub file: HirFileId, | ||
73 | pub node: AstPtr<ast::UseTree>, | ||
74 | } | ||
75 | |||
76 | impl Diagnostic for UnresolvedImport { | ||
77 | fn code(&self) -> DiagnosticCode { | ||
78 | DiagnosticCode("unresolved-import") | ||
79 | } | ||
80 | fn message(&self) -> String { | ||
81 | "unresolved import".to_string() | ||
82 | } | ||
83 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
84 | InFile::new(self.file, self.node.clone().into()) | ||
85 | } | ||
86 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
87 | self | ||
88 | } | ||
89 | fn is_experimental(&self) -> bool { | ||
90 | // This currently results in false positives in the following cases: | ||
91 | // - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly) | ||
92 | // - `core::arch` (we don't handle `#[path = "../<path>"]` correctly) | ||
93 | // - proc macros and/or proc macro generated code | ||
94 | true | ||
95 | } | ||
96 | } | ||
97 | |||
98 | // Diagnostic: unresolved-macro-call | ||
99 | // | ||
100 | // This diagnostic is triggered if rust-analyzer is unable to resolve the path to a | ||
101 | // macro in a macro invocation. | ||
102 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
103 | pub struct UnresolvedMacroCall { | ||
104 | pub file: HirFileId, | ||
105 | pub node: AstPtr<ast::MacroCall>, | ||
106 | pub path: ModPath, | ||
107 | } | ||
108 | |||
109 | impl Diagnostic for UnresolvedMacroCall { | ||
110 | fn code(&self) -> DiagnosticCode { | ||
111 | DiagnosticCode("unresolved-macro-call") | ||
112 | } | ||
113 | fn message(&self) -> String { | ||
114 | format!("unresolved macro `{}!`", self.path) | ||
115 | } | ||
116 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
117 | InFile::new(self.file, self.node.clone().into()) | ||
118 | } | ||
119 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
120 | self | ||
121 | } | ||
122 | fn is_experimental(&self) -> bool { | ||
123 | true | ||
124 | } | ||
125 | } | ||
126 | |||
127 | // Diagnostic: inactive-code | ||
128 | // | ||
129 | // This diagnostic is shown for code with inactive `#[cfg]` attributes. | ||
130 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
131 | pub struct InactiveCode { | ||
132 | pub file: HirFileId, | ||
133 | pub node: SyntaxNodePtr, | ||
134 | pub cfg: CfgExpr, | ||
135 | pub opts: CfgOptions, | ||
136 | } | ||
137 | |||
138 | impl Diagnostic for InactiveCode { | ||
139 | fn code(&self) -> DiagnosticCode { | ||
140 | DiagnosticCode("inactive-code") | ||
141 | } | ||
142 | fn message(&self) -> String { | ||
143 | let inactive = DnfExpr::new(self.cfg.clone()).why_inactive(&self.opts); | ||
144 | let mut buf = "code is inactive due to #[cfg] directives".to_string(); | ||
145 | |||
146 | if let Some(inactive) = inactive { | ||
147 | format_to!(buf, ": {}", inactive); | ||
148 | } | ||
149 | |||
150 | buf | ||
151 | } | ||
152 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
153 | InFile::new(self.file, self.node.clone()) | ||
154 | } | ||
155 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
156 | self | ||
157 | } | ||
158 | } | ||
159 | |||
160 | // Diagnostic: unresolved-proc-macro | ||
161 | // | ||
162 | // This diagnostic is shown when a procedural macro can not be found. This usually means that | ||
163 | // procedural macro support is simply disabled (and hence is only a weak hint instead of an error), | ||
164 | // but can also indicate project setup problems. | ||
165 | // | ||
166 | // If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the | ||
167 | // `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can | ||
168 | // enable support for procedural macros (see `rust-analyzer.procMacro.enable`). | ||
169 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
170 | pub struct UnresolvedProcMacro { | ||
171 | pub file: HirFileId, | ||
172 | pub node: SyntaxNodePtr, | ||
173 | /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange` | ||
174 | /// to use instead. | ||
175 | pub precise_location: Option<TextRange>, | ||
176 | pub macro_name: Option<String>, | ||
177 | } | ||
178 | |||
179 | impl Diagnostic for UnresolvedProcMacro { | ||
180 | fn code(&self) -> DiagnosticCode { | ||
181 | DiagnosticCode("unresolved-proc-macro") | ||
182 | } | ||
183 | |||
184 | fn message(&self) -> String { | ||
185 | match &self.macro_name { | ||
186 | Some(name) => format!("proc macro `{}` not expanded", name), | ||
187 | None => "proc macro not expanded".to_string(), | ||
188 | } | ||
189 | } | ||
190 | |||
191 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
192 | InFile::new(self.file, self.node.clone()) | ||
193 | } | ||
194 | |||
195 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
196 | self | ||
197 | } | ||
198 | } | ||
199 | |||
200 | // Diagnostic: macro-error | ||
201 | // | ||
202 | // This diagnostic is shown for macro expansion errors. | ||
203 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
204 | pub struct MacroError { | ||
205 | pub file: HirFileId, | ||
206 | pub node: SyntaxNodePtr, | ||
207 | pub message: String, | ||
208 | } | ||
209 | |||
210 | impl Diagnostic for MacroError { | ||
211 | fn code(&self) -> DiagnosticCode { | ||
212 | DiagnosticCode("macro-error") | ||
213 | } | ||
214 | fn message(&self) -> String { | ||
215 | self.message.clone() | ||
216 | } | ||
217 | fn display_source(&self) -> InFile<SyntaxNodePtr> { | ||
218 | InFile::new(self.file, self.node.clone()) | ||
219 | } | ||
220 | fn as_any(&self) -> &(dyn Any + Send + 'static) { | ||
221 | self | ||
222 | } | ||
223 | fn is_experimental(&self) -> bool { | ||
224 | // Newly added and not very well-tested, might contain false positives. | ||
225 | true | ||
226 | } | ||
227 | } | ||
diff --git a/crates/hir_def/src/generics.rs b/crates/hir_def/src/generics.rs index de5acced8..44d22b918 100644 --- a/crates/hir_def/src/generics.rs +++ b/crates/hir_def/src/generics.rs | |||
@@ -68,9 +68,19 @@ pub struct GenericParams { | |||
68 | /// associated type bindings like `Iterator<Item = u32>`. | 68 | /// associated type bindings like `Iterator<Item = u32>`. |
69 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | 69 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] |
70 | pub enum WherePredicate { | 70 | pub enum WherePredicate { |
71 | TypeBound { target: WherePredicateTypeTarget, bound: TypeBound }, | 71 | TypeBound { |
72 | Lifetime { target: LifetimeRef, bound: LifetimeRef }, | 72 | target: WherePredicateTypeTarget, |
73 | ForLifetime { lifetimes: Box<[Name]>, target: WherePredicateTypeTarget, bound: TypeBound }, | 73 | bound: Interned<TypeBound>, |
74 | }, | ||
75 | Lifetime { | ||
76 | target: LifetimeRef, | ||
77 | bound: LifetimeRef, | ||
78 | }, | ||
79 | ForLifetime { | ||
80 | lifetimes: Box<[Name]>, | ||
81 | target: WherePredicateTypeTarget, | ||
82 | bound: Interned<TypeBound>, | ||
83 | }, | ||
74 | } | 84 | } |
75 | 85 | ||
76 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | 86 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] |
@@ -339,11 +349,11 @@ impl GenericParams { | |||
339 | Some(hrtb_lifetimes) => WherePredicate::ForLifetime { | 349 | Some(hrtb_lifetimes) => WherePredicate::ForLifetime { |
340 | lifetimes: hrtb_lifetimes.clone(), | 350 | lifetimes: hrtb_lifetimes.clone(), |
341 | target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), | 351 | target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), |
342 | bound, | 352 | bound: Interned::new(bound), |
343 | }, | 353 | }, |
344 | None => WherePredicate::TypeBound { | 354 | None => WherePredicate::TypeBound { |
345 | target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), | 355 | target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), |
346 | bound, | 356 | bound: Interned::new(bound), |
347 | }, | 357 | }, |
348 | }, | 358 | }, |
349 | (Either::Right(lifetime), TypeBound::Lifetime(bound)) => { | 359 | (Either::Right(lifetime), TypeBound::Lifetime(bound)) => { |
diff --git a/crates/hir_def/src/intern.rs b/crates/hir_def/src/intern.rs index 5cc7f2df6..79ba970e7 100644 --- a/crates/hir_def/src/intern.rs +++ b/crates/hir_def/src/intern.rs | |||
@@ -216,7 +216,10 @@ pub use crate::_impl_internable as impl_internable; | |||
216 | impl_internable!( | 216 | impl_internable!( |
217 | crate::type_ref::TypeRef, | 217 | crate::type_ref::TypeRef, |
218 | crate::type_ref::TraitRef, | 218 | crate::type_ref::TraitRef, |
219 | crate::type_ref::TypeBound, | ||
219 | crate::path::ModPath, | 220 | crate::path::ModPath, |
221 | crate::path::GenericArgs, | ||
222 | crate::attr::AttrInput, | ||
220 | GenericParams, | 223 | GenericParams, |
221 | str, | 224 | str, |
222 | ); | 225 | ); |
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs index 4a5f44027..227337a8d 100644 --- a/crates/hir_def/src/item_tree.rs +++ b/crates/hir_def/src/item_tree.rs | |||
@@ -523,21 +523,38 @@ impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree { | |||
523 | } | 523 | } |
524 | } | 524 | } |
525 | 525 | ||
526 | /// A desugared `use` import. | ||
527 | #[derive(Debug, Clone, Eq, PartialEq)] | 526 | #[derive(Debug, Clone, Eq, PartialEq)] |
528 | pub struct Import { | 527 | pub struct Import { |
529 | pub path: Interned<ModPath>, | ||
530 | pub alias: Option<ImportAlias>, | ||
531 | pub visibility: RawVisibilityId, | 528 | pub visibility: RawVisibilityId, |
532 | pub is_glob: bool, | ||
533 | /// AST ID of the `use` item this import was derived from. Note that many `Import`s can map to | ||
534 | /// the same `use` item. | ||
535 | pub ast_id: FileAstId<ast::Use>, | 529 | pub ast_id: FileAstId<ast::Use>, |
536 | /// Index of this `Import` when the containing `Use` is visited via `ModPath::expand_use_item`. | 530 | pub use_tree: UseTree, |
537 | /// | 531 | } |
538 | /// This can be used to get the `UseTree` this `Import` corresponds to and allows emitting | 532 | |
539 | /// precise diagnostics. | 533 | #[derive(Debug, Clone, Eq, PartialEq)] |
540 | pub index: usize, | 534 | pub struct UseTree { |
535 | pub index: Idx<ast::UseTree>, | ||
536 | kind: UseTreeKind, | ||
537 | } | ||
538 | |||
539 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
540 | pub enum UseTreeKind { | ||
541 | /// ``` | ||
542 | /// use path::to::Item; | ||
543 | /// use path::to::Item as Renamed; | ||
544 | /// use path::to::Trait as _; | ||
545 | /// ``` | ||
546 | Single { path: Interned<ModPath>, alias: Option<ImportAlias> }, | ||
547 | |||
548 | /// ``` | ||
549 | /// use *; // (invalid, but can occur in nested tree) | ||
550 | /// use path::*; | ||
551 | /// ``` | ||
552 | Glob { path: Option<Interned<ModPath>> }, | ||
553 | |||
554 | /// ``` | ||
555 | /// use prefix::{self, Item, ...}; | ||
556 | /// ``` | ||
557 | Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> }, | ||
541 | } | 558 | } |
542 | 559 | ||
543 | #[derive(Debug, Clone, Eq, PartialEq)] | 560 | #[derive(Debug, Clone, Eq, PartialEq)] |
@@ -563,6 +580,7 @@ pub struct Function { | |||
563 | pub abi: Option<Interned<str>>, | 580 | pub abi: Option<Interned<str>>, |
564 | pub params: IdRange<Param>, | 581 | pub params: IdRange<Param>, |
565 | pub ret_type: Interned<TypeRef>, | 582 | pub ret_type: Interned<TypeRef>, |
583 | pub async_ret_type: Option<Interned<TypeRef>>, | ||
566 | pub ast_id: FileAstId<ast::Fn>, | 584 | pub ast_id: FileAstId<ast::Fn>, |
567 | pub(crate) flags: FnFlags, | 585 | pub(crate) flags: FnFlags, |
568 | } | 586 | } |
@@ -644,7 +662,6 @@ pub struct Trait { | |||
644 | pub generic_params: Interned<GenericParams>, | 662 | pub generic_params: Interned<GenericParams>, |
645 | pub is_auto: bool, | 663 | pub is_auto: bool, |
646 | pub is_unsafe: bool, | 664 | pub is_unsafe: bool, |
647 | pub bounds: Box<[TypeBound]>, | ||
648 | pub items: Box<[AssocItem]>, | 665 | pub items: Box<[AssocItem]>, |
649 | pub ast_id: FileAstId<ast::Trait>, | 666 | pub ast_id: FileAstId<ast::Trait>, |
650 | } | 667 | } |
@@ -664,7 +681,7 @@ pub struct TypeAlias { | |||
664 | pub name: Name, | 681 | pub name: Name, |
665 | pub visibility: RawVisibilityId, | 682 | pub visibility: RawVisibilityId, |
666 | /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`. | 683 | /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`. |
667 | pub bounds: Box<[TypeBound]>, | 684 | pub bounds: Box<[Interned<TypeBound>]>, |
668 | pub generic_params: Interned<GenericParams>, | 685 | pub generic_params: Interned<GenericParams>, |
669 | pub type_ref: Option<Interned<TypeRef>>, | 686 | pub type_ref: Option<Interned<TypeRef>>, |
670 | pub is_extern: bool, | 687 | pub is_extern: bool, |
@@ -711,6 +728,97 @@ pub struct MacroDef { | |||
711 | pub ast_id: FileAstId<ast::MacroDef>, | 728 | pub ast_id: FileAstId<ast::MacroDef>, |
712 | } | 729 | } |
713 | 730 | ||
731 | impl Import { | ||
732 | /// Maps a `UseTree` contained in this import back to its AST node. | ||
733 | pub fn use_tree_to_ast( | ||
734 | &self, | ||
735 | db: &dyn DefDatabase, | ||
736 | file_id: HirFileId, | ||
737 | index: Idx<ast::UseTree>, | ||
738 | ) -> ast::UseTree { | ||
739 | // Re-lower the AST item and get the source map. | ||
740 | // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. | ||
741 | let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); | ||
742 | let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); | ||
743 | let hygiene = Hygiene::new(db.upcast(), file_id); | ||
744 | let (_, source_map) = | ||
745 | lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree"); | ||
746 | source_map[index].clone() | ||
747 | } | ||
748 | } | ||
749 | |||
750 | impl UseTree { | ||
751 | /// Expands the `UseTree` into individually imported `ModPath`s. | ||
752 | pub fn expand( | ||
753 | &self, | ||
754 | mut cb: impl FnMut(Idx<ast::UseTree>, ModPath, /* is_glob */ bool, Option<ImportAlias>), | ||
755 | ) { | ||
756 | self.expand_impl(None, &mut cb) | ||
757 | } | ||
758 | |||
759 | fn expand_impl( | ||
760 | &self, | ||
761 | prefix: Option<ModPath>, | ||
762 | cb: &mut dyn FnMut( | ||
763 | Idx<ast::UseTree>, | ||
764 | ModPath, | ||
765 | /* is_glob */ bool, | ||
766 | Option<ImportAlias>, | ||
767 | ), | ||
768 | ) { | ||
769 | fn concat_mod_paths(prefix: Option<ModPath>, path: &ModPath) -> Option<ModPath> { | ||
770 | match (prefix, &path.kind) { | ||
771 | (None, _) => Some(path.clone()), | ||
772 | (Some(mut prefix), PathKind::Plain) => { | ||
773 | for segment in path.segments() { | ||
774 | prefix.push_segment(segment.clone()); | ||
775 | } | ||
776 | Some(prefix) | ||
777 | } | ||
778 | (Some(prefix), PathKind::Super(0)) => { | ||
779 | // `some::path::self` == `some::path` | ||
780 | if path.segments().is_empty() { | ||
781 | Some(prefix) | ||
782 | } else { | ||
783 | None | ||
784 | } | ||
785 | } | ||
786 | (Some(_), _) => None, | ||
787 | } | ||
788 | } | ||
789 | |||
790 | match &self.kind { | ||
791 | UseTreeKind::Single { path, alias } => { | ||
792 | if let Some(path) = concat_mod_paths(prefix, path) { | ||
793 | cb(self.index, path, false, alias.clone()); | ||
794 | } | ||
795 | } | ||
796 | UseTreeKind::Glob { path: Some(path) } => { | ||
797 | if let Some(path) = concat_mod_paths(prefix, path) { | ||
798 | cb(self.index, path, true, None); | ||
799 | } | ||
800 | } | ||
801 | UseTreeKind::Glob { path: None } => { | ||
802 | if let Some(prefix) = prefix { | ||
803 | cb(self.index, prefix, true, None); | ||
804 | } | ||
805 | } | ||
806 | UseTreeKind::Prefixed { prefix: additional_prefix, list } => { | ||
807 | let prefix = match additional_prefix { | ||
808 | Some(path) => match concat_mod_paths(prefix, path) { | ||
809 | Some(path) => Some(path), | ||
810 | None => return, | ||
811 | }, | ||
812 | None => prefix, | ||
813 | }; | ||
814 | for tree in &**list { | ||
815 | tree.expand_impl(prefix.clone(), cb); | ||
816 | } | ||
817 | } | ||
818 | } | ||
819 | } | ||
820 | } | ||
821 | |||
714 | macro_rules! impl_froms { | 822 | macro_rules! impl_froms { |
715 | ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => { | 823 | ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => { |
716 | $( | 824 | $( |
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs index 91cf75371..6208facd5 100644 --- a/crates/hir_def/src/item_tree/lower.rs +++ b/crates/hir_def/src/item_tree/lower.rs | |||
@@ -3,7 +3,6 @@ | |||
3 | use std::{collections::hash_map::Entry, mem, sync::Arc}; | 3 | use std::{collections::hash_map::Entry, mem, sync::Arc}; |
4 | 4 | ||
5 | use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, name::known, HirFileId}; | 5 | use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, name::known, HirFileId}; |
6 | use smallvec::SmallVec; | ||
7 | use syntax::{ | 6 | use syntax::{ |
8 | ast::{self, ModuleItemOwner}, | 7 | ast::{self, ModuleItemOwner}, |
9 | SyntaxNode, WalkEvent, | 8 | SyntaxNode, WalkEvent, |
@@ -20,22 +19,10 @@ fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> { | |||
20 | FileItemTreeId { index, _p: PhantomData } | 19 | FileItemTreeId { index, _p: PhantomData } |
21 | } | 20 | } |
22 | 21 | ||
23 | struct ModItems(SmallVec<[ModItem; 1]>); | ||
24 | |||
25 | impl<T> From<T> for ModItems | ||
26 | where | ||
27 | T: Into<ModItem>, | ||
28 | { | ||
29 | fn from(t: T) -> Self { | ||
30 | ModItems(SmallVec::from_buf([t.into(); 1])) | ||
31 | } | ||
32 | } | ||
33 | |||
34 | pub(super) struct Ctx<'a> { | 22 | pub(super) struct Ctx<'a> { |
35 | db: &'a dyn DefDatabase, | 23 | db: &'a dyn DefDatabase, |
36 | tree: ItemTree, | 24 | tree: ItemTree, |
37 | hygiene: Hygiene, | 25 | hygiene: Hygiene, |
38 | file: HirFileId, | ||
39 | source_ast_id_map: Arc<AstIdMap>, | 26 | source_ast_id_map: Arc<AstIdMap>, |
40 | body_ctx: crate::body::LowerCtx<'a>, | 27 | body_ctx: crate::body::LowerCtx<'a>, |
41 | forced_visibility: Option<RawVisibilityId>, | 28 | forced_visibility: Option<RawVisibilityId>, |
@@ -47,7 +34,6 @@ impl<'a> Ctx<'a> { | |||
47 | db, | 34 | db, |
48 | tree: ItemTree::default(), | 35 | tree: ItemTree::default(), |
49 | hygiene, | 36 | hygiene, |
50 | file, | ||
51 | source_ast_id_map: db.ast_id_map(file), | 37 | source_ast_id_map: db.ast_id_map(file), |
52 | body_ctx: crate::body::LowerCtx::new(db, file), | 38 | body_ctx: crate::body::LowerCtx::new(db, file), |
53 | forced_visibility: None, | 39 | forced_visibility: None, |
@@ -55,11 +41,8 @@ impl<'a> Ctx<'a> { | |||
55 | } | 41 | } |
56 | 42 | ||
57 | pub(super) fn lower_module_items(mut self, item_owner: &dyn ModuleItemOwner) -> ItemTree { | 43 | pub(super) fn lower_module_items(mut self, item_owner: &dyn ModuleItemOwner) -> ItemTree { |
58 | self.tree.top_level = item_owner | 44 | self.tree.top_level = |
59 | .items() | 45 | item_owner.items().flat_map(|item| self.lower_mod_item(&item, false)).collect(); |
60 | .flat_map(|item| self.lower_mod_item(&item, false)) | ||
61 | .flat_map(|items| items.0) | ||
62 | .collect(); | ||
63 | self.tree | 46 | self.tree |
64 | } | 47 | } |
65 | 48 | ||
@@ -71,7 +54,6 @@ impl<'a> Ctx<'a> { | |||
71 | _ => None, | 54 | _ => None, |
72 | }) | 55 | }) |
73 | .flat_map(|item| self.lower_mod_item(&item, false)) | 56 | .flat_map(|item| self.lower_mod_item(&item, false)) |
74 | .flat_map(|items| items.0) | ||
75 | .collect(); | 57 | .collect(); |
76 | 58 | ||
77 | // Non-items need to have their inner items collected. | 59 | // Non-items need to have their inner items collected. |
@@ -98,7 +80,7 @@ impl<'a> Ctx<'a> { | |||
98 | self.tree.data_mut() | 80 | self.tree.data_mut() |
99 | } | 81 | } |
100 | 82 | ||
101 | fn lower_mod_item(&mut self, item: &ast::Item, inner: bool) -> Option<ModItems> { | 83 | fn lower_mod_item(&mut self, item: &ast::Item, inner: bool) -> Option<ModItem> { |
102 | // Collect inner items for 1-to-1-lowered items. | 84 | // Collect inner items for 1-to-1-lowered items. |
103 | match item { | 85 | match item { |
104 | ast::Item::Struct(_) | 86 | ast::Item::Struct(_) |
@@ -129,34 +111,28 @@ impl<'a> Ctx<'a> { | |||
129 | }; | 111 | }; |
130 | 112 | ||
131 | let attrs = RawAttrs::new(self.db, item, &self.hygiene); | 113 | let attrs = RawAttrs::new(self.db, item, &self.hygiene); |
132 | let items = match item { | 114 | let item: ModItem = match item { |
133 | ast::Item::Struct(ast) => self.lower_struct(ast).map(Into::into), | 115 | ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), |
134 | ast::Item::Union(ast) => self.lower_union(ast).map(Into::into), | 116 | ast::Item::Union(ast) => self.lower_union(ast)?.into(), |
135 | ast::Item::Enum(ast) => self.lower_enum(ast).map(Into::into), | 117 | ast::Item::Enum(ast) => self.lower_enum(ast)?.into(), |
136 | ast::Item::Fn(ast) => self.lower_function(ast).map(Into::into), | 118 | ast::Item::Fn(ast) => self.lower_function(ast)?.into(), |
137 | ast::Item::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), | 119 | ast::Item::TypeAlias(ast) => self.lower_type_alias(ast)?.into(), |
138 | ast::Item::Static(ast) => self.lower_static(ast).map(Into::into), | 120 | ast::Item::Static(ast) => self.lower_static(ast)?.into(), |
139 | ast::Item::Const(ast) => Some(self.lower_const(ast).into()), | 121 | ast::Item::Const(ast) => self.lower_const(ast).into(), |
140 | ast::Item::Module(ast) => self.lower_module(ast).map(Into::into), | 122 | ast::Item::Module(ast) => self.lower_module(ast)?.into(), |
141 | ast::Item::Trait(ast) => self.lower_trait(ast).map(Into::into), | 123 | ast::Item::Trait(ast) => self.lower_trait(ast)?.into(), |
142 | ast::Item::Impl(ast) => self.lower_impl(ast).map(Into::into), | 124 | ast::Item::Impl(ast) => self.lower_impl(ast)?.into(), |
143 | ast::Item::Use(ast) => Some(ModItems( | 125 | ast::Item::Use(ast) => self.lower_use(ast)?.into(), |
144 | self.lower_use(ast).into_iter().map(Into::into).collect::<SmallVec<_>>(), | 126 | ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(), |
145 | )), | 127 | ast::Item::MacroCall(ast) => self.lower_macro_call(ast)?.into(), |
146 | ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast).map(Into::into), | 128 | ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(), |
147 | ast::Item::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), | 129 | ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), |
148 | ast::Item::MacroRules(ast) => self.lower_macro_rules(ast).map(Into::into), | 130 | ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), |
149 | ast::Item::MacroDef(ast) => self.lower_macro_def(ast).map(Into::into), | ||
150 | ast::Item::ExternBlock(ast) => Some(self.lower_extern_block(ast).into()), | ||
151 | }; | 131 | }; |
152 | 132 | ||
153 | if !attrs.is_empty() { | 133 | self.add_attrs(item.into(), attrs.clone()); |
154 | for item in items.iter().flat_map(|items| &items.0) { | ||
155 | self.add_attrs((*item).into(), attrs.clone()); | ||
156 | } | ||
157 | } | ||
158 | 134 | ||
159 | items | 135 | Some(item) |
160 | } | 136 | } |
161 | 137 | ||
162 | fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { | 138 | fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { |
@@ -190,12 +166,10 @@ impl<'a> Ctx<'a> { | |||
190 | }, | 166 | }, |
191 | ast::Item(item) => { | 167 | ast::Item(item) => { |
192 | // FIXME: This triggers for macro calls in expression/pattern/type position | 168 | // FIXME: This triggers for macro calls in expression/pattern/type position |
193 | let mod_items = self.lower_mod_item(&item, true); | 169 | let mod_item = self.lower_mod_item(&item, true); |
194 | let current_block = block_stack.last(); | 170 | let current_block = block_stack.last(); |
195 | if let (Some(mod_items), Some(block)) = (mod_items, current_block) { | 171 | if let (Some(mod_item), Some(block)) = (mod_item, current_block) { |
196 | if !mod_items.0.is_empty() { | 172 | self.data().inner_items.entry(*block).or_default().push(mod_item); |
197 | self.data().inner_items.entry(*block).or_default().extend(mod_items.0.iter().copied()); | ||
198 | } | ||
199 | } | 173 | } |
200 | }, | 174 | }, |
201 | _ => {} | 175 | _ => {} |
@@ -382,12 +356,13 @@ impl<'a> Ctx<'a> { | |||
382 | _ => TypeRef::unit(), | 356 | _ => TypeRef::unit(), |
383 | }; | 357 | }; |
384 | 358 | ||
385 | let ret_type = if func.async_token().is_some() { | 359 | let (ret_type, async_ret_type) = if func.async_token().is_some() { |
360 | let async_ret_type = ret_type.clone(); | ||
386 | let future_impl = desugar_future_path(ret_type); | 361 | let future_impl = desugar_future_path(ret_type); |
387 | let ty_bound = TypeBound::Path(future_impl); | 362 | let ty_bound = Interned::new(TypeBound::Path(future_impl)); |
388 | TypeRef::ImplTrait(vec![ty_bound]) | 363 | (TypeRef::ImplTrait(vec![ty_bound]), Some(async_ret_type)) |
389 | } else { | 364 | } else { |
390 | ret_type | 365 | (ret_type, None) |
391 | }; | 366 | }; |
392 | 367 | ||
393 | let abi = func.abi().map(lower_abi); | 368 | let abi = func.abi().map(lower_abi); |
@@ -421,6 +396,7 @@ impl<'a> Ctx<'a> { | |||
421 | abi, | 396 | abi, |
422 | params, | 397 | params, |
423 | ret_type: Interned::new(ret_type), | 398 | ret_type: Interned::new(ret_type), |
399 | async_ret_type: async_ret_type.map(Interned::new), | ||
424 | ast_id, | 400 | ast_id, |
425 | flags, | 401 | flags, |
426 | }; | 402 | }; |
@@ -480,10 +456,7 @@ impl<'a> Ctx<'a> { | |||
480 | items: module | 456 | items: module |
481 | .item_list() | 457 | .item_list() |
482 | .map(|list| { | 458 | .map(|list| { |
483 | list.items() | 459 | list.items().flat_map(|item| self.lower_mod_item(&item, false)).collect() |
484 | .flat_map(|item| self.lower_mod_item(&item, false)) | ||
485 | .flat_map(|items| items.0) | ||
486 | .collect() | ||
487 | }) | 460 | }) |
488 | .unwrap_or_else(|| { | 461 | .unwrap_or_else(|| { |
489 | cov_mark::hit!(name_res_works_for_broken_modules); | 462 | cov_mark::hit!(name_res_works_for_broken_modules); |
@@ -503,7 +476,6 @@ impl<'a> Ctx<'a> { | |||
503 | self.lower_generic_params_and_inner_items(GenericsOwner::Trait(trait_def), trait_def); | 476 | self.lower_generic_params_and_inner_items(GenericsOwner::Trait(trait_def), trait_def); |
504 | let is_auto = trait_def.auto_token().is_some(); | 477 | let is_auto = trait_def.auto_token().is_some(); |
505 | let is_unsafe = trait_def.unsafe_token().is_some(); | 478 | let is_unsafe = trait_def.unsafe_token().is_some(); |
506 | let bounds = self.lower_type_bounds(trait_def); | ||
507 | let items = trait_def.assoc_item_list().map(|list| { | 479 | let items = trait_def.assoc_item_list().map(|list| { |
508 | let db = self.db; | 480 | let db = self.db; |
509 | self.with_inherited_visibility(visibility, |this| { | 481 | self.with_inherited_visibility(visibility, |this| { |
@@ -526,7 +498,6 @@ impl<'a> Ctx<'a> { | |||
526 | generic_params, | 498 | generic_params, |
527 | is_auto, | 499 | is_auto, |
528 | is_unsafe, | 500 | is_unsafe, |
529 | bounds: bounds.into(), | ||
530 | items: items.unwrap_or_default(), | 501 | items: items.unwrap_or_default(), |
531 | ast_id, | 502 | ast_id, |
532 | }; | 503 | }; |
@@ -561,30 +532,13 @@ impl<'a> Ctx<'a> { | |||
561 | Some(id(self.data().impls.alloc(res))) | 532 | Some(id(self.data().impls.alloc(res))) |
562 | } | 533 | } |
563 | 534 | ||
564 | fn lower_use(&mut self, use_item: &ast::Use) -> Vec<FileItemTreeId<Import>> { | 535 | fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> { |
565 | let visibility = self.lower_visibility(use_item); | 536 | let visibility = self.lower_visibility(use_item); |
566 | let ast_id = self.source_ast_id_map.ast_id(use_item); | 537 | let ast_id = self.source_ast_id_map.ast_id(use_item); |
538 | let (use_tree, _) = lower_use_tree(self.db, &self.hygiene, use_item.use_tree()?)?; | ||
567 | 539 | ||
568 | // Every use item can expand to many `Import`s. | 540 | let res = Import { visibility, ast_id, use_tree }; |
569 | let mut imports = Vec::new(); | 541 | Some(id(self.data().imports.alloc(res))) |
570 | let tree = self.tree.data_mut(); | ||
571 | ModPath::expand_use_item( | ||
572 | self.db, | ||
573 | InFile::new(self.file, use_item.clone()), | ||
574 | &self.hygiene, | ||
575 | |path, _use_tree, is_glob, alias| { | ||
576 | imports.push(id(tree.imports.alloc(Import { | ||
577 | path: Interned::new(path), | ||
578 | alias, | ||
579 | visibility, | ||
580 | is_glob, | ||
581 | ast_id, | ||
582 | index: imports.len(), | ||
583 | }))); | ||
584 | }, | ||
585 | ); | ||
586 | |||
587 | imports | ||
588 | } | 542 | } |
589 | 543 | ||
590 | fn lower_extern_crate( | 544 | fn lower_extern_crate( |
@@ -738,11 +692,12 @@ impl<'a> Ctx<'a> { | |||
738 | Interned::new(generics) | 692 | Interned::new(generics) |
739 | } | 693 | } |
740 | 694 | ||
741 | fn lower_type_bounds(&mut self, node: &impl ast::TypeBoundsOwner) -> Vec<TypeBound> { | 695 | fn lower_type_bounds(&mut self, node: &impl ast::TypeBoundsOwner) -> Vec<Interned<TypeBound>> { |
742 | match node.type_bound_list() { | 696 | match node.type_bound_list() { |
743 | Some(bound_list) => { | 697 | Some(bound_list) => bound_list |
744 | bound_list.bounds().map(|it| TypeBound::from_ast(&self.body_ctx, it)).collect() | 698 | .bounds() |
745 | } | 699 | .map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it))) |
700 | .collect(), | ||
746 | None => Vec::new(), | 701 | None => Vec::new(), |
747 | } | 702 | } |
748 | } | 703 | } |
@@ -810,7 +765,7 @@ fn desugar_future_path(orig: TypeRef) -> Path { | |||
810 | let binding = | 765 | let binding = |
811 | AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() }; | 766 | AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() }; |
812 | last.bindings.push(binding); | 767 | last.bindings.push(binding); |
813 | generic_args.push(Some(Arc::new(last))); | 768 | generic_args.push(Some(Interned::new(last))); |
814 | 769 | ||
815 | Path::from_known_path(path, generic_args) | 770 | Path::from_known_path(path, generic_args) |
816 | } | 771 | } |
@@ -883,3 +838,81 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> { | |||
883 | } | 838 | } |
884 | } | 839 | } |
885 | } | 840 | } |
841 | |||
842 | struct UseTreeLowering<'a> { | ||
843 | db: &'a dyn DefDatabase, | ||
844 | hygiene: &'a Hygiene, | ||
845 | mapping: Arena<ast::UseTree>, | ||
846 | } | ||
847 | |||
848 | impl UseTreeLowering<'_> { | ||
849 | fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> { | ||
850 | if let Some(use_tree_list) = tree.use_tree_list() { | ||
851 | let prefix = match tree.path() { | ||
852 | // E.g. use something::{{{inner}}}; | ||
853 | None => None, | ||
854 | // E.g. `use something::{inner}` (prefix is `None`, path is `something`) | ||
855 | // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) | ||
856 | Some(path) => { | ||
857 | match ModPath::from_src(self.db, path, &self.hygiene) { | ||
858 | Some(it) => Some(it), | ||
859 | None => return None, // FIXME: report errors somewhere | ||
860 | } | ||
861 | } | ||
862 | }; | ||
863 | |||
864 | let list = | ||
865 | use_tree_list.use_trees().filter_map(|tree| self.lower_use_tree(tree)).collect(); | ||
866 | |||
867 | Some( | ||
868 | self.use_tree( | ||
869 | UseTreeKind::Prefixed { prefix: prefix.map(Interned::new), list }, | ||
870 | tree, | ||
871 | ), | ||
872 | ) | ||
873 | } else { | ||
874 | let is_glob = tree.star_token().is_some(); | ||
875 | let path = match tree.path() { | ||
876 | Some(path) => Some(ModPath::from_src(self.db, path, &self.hygiene)?), | ||
877 | None => None, | ||
878 | }; | ||
879 | let alias = tree.rename().map(|a| { | ||
880 | a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) | ||
881 | }); | ||
882 | if alias.is_some() && is_glob { | ||
883 | return None; | ||
884 | } | ||
885 | |||
886 | match (path, alias, is_glob) { | ||
887 | (path, None, true) => { | ||
888 | if path.is_none() { | ||
889 | cov_mark::hit!(glob_enum_group); | ||
890 | } | ||
891 | Some(self.use_tree(UseTreeKind::Glob { path: path.map(Interned::new) }, tree)) | ||
892 | } | ||
893 | // Globs can't be renamed | ||
894 | (_, Some(_), true) | (None, None, false) => None, | ||
895 | // `bla::{ as Name}` is invalid | ||
896 | (None, Some(_), false) => None, | ||
897 | (Some(path), alias, false) => Some( | ||
898 | self.use_tree(UseTreeKind::Single { path: Interned::new(path), alias }, tree), | ||
899 | ), | ||
900 | } | ||
901 | } | ||
902 | } | ||
903 | |||
904 | fn use_tree(&mut self, kind: UseTreeKind, ast: ast::UseTree) -> UseTree { | ||
905 | let index = self.mapping.alloc(ast); | ||
906 | UseTree { index, kind } | ||
907 | } | ||
908 | } | ||
909 | |||
910 | pub(super) fn lower_use_tree( | ||
911 | db: &dyn DefDatabase, | ||
912 | hygiene: &Hygiene, | ||
913 | tree: ast::UseTree, | ||
914 | ) -> Option<(UseTree, Arena<ast::UseTree>)> { | ||
915 | let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() }; | ||
916 | let tree = lowering.lower_use_tree(tree)?; | ||
917 | Some((tree, lowering.mapping)) | ||
918 | } | ||
diff --git a/crates/hir_def/src/item_tree/pretty.rs b/crates/hir_def/src/item_tree/pretty.rs index 4bc87a0e2..cc9944a22 100644 --- a/crates/hir_def/src/item_tree/pretty.rs +++ b/crates/hir_def/src/item_tree/pretty.rs | |||
@@ -163,21 +163,46 @@ impl<'a> Printer<'a> { | |||
163 | } | 163 | } |
164 | } | 164 | } |
165 | 165 | ||
166 | fn print_use_tree(&mut self, use_tree: &UseTree) { | ||
167 | match &use_tree.kind { | ||
168 | UseTreeKind::Single { path, alias } => { | ||
169 | w!(self, "{}", path); | ||
170 | if let Some(alias) = alias { | ||
171 | w!(self, " as {}", alias); | ||
172 | } | ||
173 | } | ||
174 | UseTreeKind::Glob { path } => { | ||
175 | if let Some(path) = path { | ||
176 | w!(self, "{}::", path); | ||
177 | } | ||
178 | w!(self, "*"); | ||
179 | } | ||
180 | UseTreeKind::Prefixed { prefix, list } => { | ||
181 | if let Some(prefix) = prefix { | ||
182 | w!(self, "{}::", prefix); | ||
183 | } | ||
184 | w!(self, "{{"); | ||
185 | for (i, tree) in list.iter().enumerate() { | ||
186 | if i != 0 { | ||
187 | w!(self, ", "); | ||
188 | } | ||
189 | self.print_use_tree(tree); | ||
190 | } | ||
191 | w!(self, "}}"); | ||
192 | } | ||
193 | } | ||
194 | } | ||
195 | |||
166 | fn print_mod_item(&mut self, item: ModItem) { | 196 | fn print_mod_item(&mut self, item: ModItem) { |
167 | self.print_attrs_of(item); | 197 | self.print_attrs_of(item); |
168 | 198 | ||
169 | match item { | 199 | match item { |
170 | ModItem::Import(it) => { | 200 | ModItem::Import(it) => { |
171 | let Import { visibility, path, is_glob, alias, ast_id: _, index } = &self.tree[it]; | 201 | let Import { visibility, use_tree, ast_id: _ } = &self.tree[it]; |
172 | self.print_visibility(*visibility); | 202 | self.print_visibility(*visibility); |
173 | w!(self, "use {}", path); | 203 | w!(self, "use "); |
174 | if *is_glob { | 204 | self.print_use_tree(use_tree); |
175 | w!(self, "::*"); | 205 | wln!(self, ";"); |
176 | } | ||
177 | if let Some(alias) = alias { | ||
178 | w!(self, " as {}", alias); | ||
179 | } | ||
180 | wln!(self, "; // {}", index); | ||
181 | } | 206 | } |
182 | ModItem::ExternCrate(it) => { | 207 | ModItem::ExternCrate(it) => { |
183 | let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it]; | 208 | let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it]; |
@@ -210,6 +235,7 @@ impl<'a> Printer<'a> { | |||
210 | abi, | 235 | abi, |
211 | params, | 236 | params, |
212 | ret_type, | 237 | ret_type, |
238 | async_ret_type: _, | ||
213 | ast_id: _, | 239 | ast_id: _, |
214 | flags, | 240 | flags, |
215 | } = &self.tree[it]; | 241 | } = &self.tree[it]; |
@@ -320,7 +346,6 @@ impl<'a> Printer<'a> { | |||
320 | visibility, | 346 | visibility, |
321 | is_auto, | 347 | is_auto, |
322 | is_unsafe, | 348 | is_unsafe, |
323 | bounds, | ||
324 | items, | 349 | items, |
325 | generic_params, | 350 | generic_params, |
326 | ast_id: _, | 351 | ast_id: _, |
@@ -334,10 +359,6 @@ impl<'a> Printer<'a> { | |||
334 | } | 359 | } |
335 | w!(self, "trait {}", name); | 360 | w!(self, "trait {}", name); |
336 | self.print_generic_params(generic_params); | 361 | self.print_generic_params(generic_params); |
337 | if !bounds.is_empty() { | ||
338 | w!(self, ": "); | ||
339 | self.print_type_bounds(bounds); | ||
340 | } | ||
341 | self.print_where_clause_and_opening_brace(generic_params); | 362 | self.print_where_clause_and_opening_brace(generic_params); |
342 | self.indented(|this| { | 363 | self.indented(|this| { |
343 | for item in &**items { | 364 | for item in &**items { |
@@ -513,13 +534,13 @@ impl<'a> Printer<'a> { | |||
513 | } | 534 | } |
514 | } | 535 | } |
515 | 536 | ||
516 | fn print_type_bounds(&mut self, bounds: &[TypeBound]) { | 537 | fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) { |
517 | for (i, bound) in bounds.iter().enumerate() { | 538 | for (i, bound) in bounds.iter().enumerate() { |
518 | if i != 0 { | 539 | if i != 0 { |
519 | w!(self, " + "); | 540 | w!(self, " + "); |
520 | } | 541 | } |
521 | 542 | ||
522 | match bound { | 543 | match bound.as_ref() { |
523 | TypeBound::Path(path) => self.print_path(path), | 544 | TypeBound::Path(path) => self.print_path(path), |
524 | TypeBound::Lifetime(lt) => w!(self, "{}", lt.name), | 545 | TypeBound::Lifetime(lt) => w!(self, "{}", lt.name), |
525 | TypeBound::Error => w!(self, "{{unknown}}"), | 546 | TypeBound::Error => w!(self, "{{unknown}}"), |
diff --git a/crates/hir_def/src/item_tree/tests.rs b/crates/hir_def/src/item_tree/tests.rs index 6407871b5..b362add5c 100644 --- a/crates/hir_def/src/item_tree/tests.rs +++ b/crates/hir_def/src/item_tree/tests.rs | |||
@@ -26,6 +26,8 @@ use globs::*; | |||
26 | 26 | ||
27 | /// docs on import | 27 | /// docs on import |
28 | use crate::{A, B}; | 28 | use crate::{A, B}; |
29 | |||
30 | use a::{c, d::{e}}; | ||
29 | "#, | 31 | "#, |
30 | expect![[r##" | 32 | expect![[r##" |
31 | #![doc = " file comment"] // AttrId { is_doc_comment: true, ast_index: 0 } | 33 | #![doc = " file comment"] // AttrId { is_doc_comment: true, ast_index: 0 } |
@@ -36,19 +38,14 @@ use crate::{A, B}; | |||
36 | 38 | ||
37 | pub(super) extern crate bli; | 39 | pub(super) extern crate bli; |
38 | 40 | ||
39 | pub use crate::path::nested; // 0 | 41 | pub use crate::path::{nested, items as renamed, Trait as _}; |
40 | |||
41 | pub use crate::path::items as renamed; // 1 | ||
42 | 42 | ||
43 | pub use crate::path::Trait as _; // 2 | 43 | pub(self) use globs::*; |
44 | |||
45 | pub(self) use globs::*; // 0 | ||
46 | 44 | ||
47 | #[doc = " docs on import"] // AttrId { is_doc_comment: true, ast_index: 0 } | 45 | #[doc = " docs on import"] // AttrId { is_doc_comment: true, ast_index: 0 } |
48 | pub(self) use crate::A; // 0 | 46 | pub(self) use crate::{A, B}; |
49 | 47 | ||
50 | #[doc = " docs on import"] // AttrId { is_doc_comment: true, ast_index: 0 } | 48 | pub(self) use a::{c, d::{e}}; |
51 | pub(self) use crate::B; // 1 | ||
52 | "##]], | 49 | "##]], |
53 | ); | 50 | ); |
54 | } | 51 | } |
@@ -183,7 +180,7 @@ trait Tr: SuperTrait + 'lifetime { | |||
183 | _: (), | 180 | _: (), |
184 | ) -> (); | 181 | ) -> (); |
185 | 182 | ||
186 | pub(self) trait Tr<Self>: SuperTrait + 'lifetime | 183 | pub(self) trait Tr<Self> |
187 | where | 184 | where |
188 | Self: SuperTrait, | 185 | Self: SuperTrait, |
189 | Self: 'lifetime | 186 | Self: 'lifetime |
@@ -218,7 +215,7 @@ mod outline; | |||
218 | #[doc = " outer"] // AttrId { is_doc_comment: true, ast_index: 0 } | 215 | #[doc = " outer"] // AttrId { is_doc_comment: true, ast_index: 0 } |
219 | #[doc = " inner"] // AttrId { is_doc_comment: true, ast_index: 1 } | 216 | #[doc = " inner"] // AttrId { is_doc_comment: true, ast_index: 1 } |
220 | pub(self) mod inline { | 217 | pub(self) mod inline { |
221 | pub(self) use super::*; // 0 | 218 | pub(self) use super::*; |
222 | 219 | ||
223 | // flags = 0x2 | 220 | // flags = 0x2 |
224 | pub(self) fn fn_in_module() -> (); | 221 | pub(self) fn fn_in_module() -> (); |
@@ -353,7 +350,7 @@ trait Tr<'a, T: 'a>: Super {} | |||
353 | pub(self) union Union<'a, T, const U: u8> { | 350 | pub(self) union Union<'a, T, const U: u8> { |
354 | } | 351 | } |
355 | 352 | ||
356 | pub(self) trait Tr<'a, Self, T>: Super | 353 | pub(self) trait Tr<'a, Self, T> |
357 | where | 354 | where |
358 | Self: Super, | 355 | Self: Super, |
359 | T: 'a | 356 | T: 'a |
diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs index 70001cac8..9aa95720a 100644 --- a/crates/hir_def/src/lib.rs +++ b/crates/hir_def/src/lib.rs | |||
@@ -19,7 +19,6 @@ pub mod path; | |||
19 | pub mod type_ref; | 19 | pub mod type_ref; |
20 | pub mod builtin_type; | 20 | pub mod builtin_type; |
21 | pub mod builtin_attr; | 21 | pub mod builtin_attr; |
22 | pub mod diagnostics; | ||
23 | pub mod per_ns; | 22 | pub mod per_ns; |
24 | pub mod item_scope; | 23 | pub mod item_scope; |
25 | 24 | ||
@@ -56,7 +55,6 @@ use std::{ | |||
56 | sync::Arc, | 55 | sync::Arc, |
57 | }; | 56 | }; |
58 | 57 | ||
59 | use adt::VariantData; | ||
60 | use base_db::{impl_intern_key, salsa, CrateId}; | 58 | use base_db::{impl_intern_key, salsa, CrateId}; |
61 | use hir_expand::{ | 59 | use hir_expand::{ |
62 | ast_id_map::FileAstId, | 60 | ast_id_map::FileAstId, |
@@ -67,15 +65,18 @@ use hir_expand::{ | |||
67 | use la_arena::Idx; | 65 | use la_arena::Idx; |
68 | use nameres::DefMap; | 66 | use nameres::DefMap; |
69 | use path::ModPath; | 67 | use path::ModPath; |
68 | use stdx::impl_from; | ||
70 | use syntax::ast; | 69 | use syntax::ast; |
71 | 70 | ||
72 | use crate::attr::AttrId; | 71 | use crate::{ |
73 | use crate::builtin_type::BuiltinType; | 72 | adt::VariantData, |
74 | use item_tree::{ | 73 | attr::AttrId, |
75 | Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, ModItem, Static, Struct, Trait, | 74 | builtin_type::BuiltinType, |
76 | TypeAlias, Union, | 75 | item_tree::{ |
76 | Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, ModItem, Static, Struct, Trait, | ||
77 | TypeAlias, Union, | ||
78 | }, | ||
77 | }; | 79 | }; |
78 | use stdx::impl_from; | ||
79 | 80 | ||
80 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 81 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
81 | pub struct ModuleId { | 82 | pub struct ModuleId { |
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs index 249af6fc8..ebfcc26c4 100644 --- a/crates/hir_def/src/nameres.rs +++ b/crates/hir_def/src/nameres.rs | |||
@@ -47,18 +47,19 @@ | |||
47 | //! path and, upon success, we run macro expansion and "collect module" phase on | 47 | //! path and, upon success, we run macro expansion and "collect module" phase on |
48 | //! the result | 48 | //! the result |
49 | 49 | ||
50 | pub mod diagnostics; | ||
50 | mod collector; | 51 | mod collector; |
51 | mod mod_resolution; | 52 | mod mod_resolution; |
52 | mod path_resolution; | 53 | mod path_resolution; |
54 | mod proc_macro; | ||
53 | 55 | ||
54 | #[cfg(test)] | 56 | #[cfg(test)] |
55 | mod tests; | 57 | mod tests; |
56 | mod proc_macro; | ||
57 | 58 | ||
58 | use std::sync::Arc; | 59 | use std::sync::Arc; |
59 | 60 | ||
60 | use base_db::{CrateId, Edition, FileId}; | 61 | use base_db::{CrateId, Edition, FileId}; |
61 | use hir_expand::{diagnostics::DiagnosticSink, name::Name, InFile, MacroDefId}; | 62 | use hir_expand::{name::Name, InFile, MacroDefId}; |
62 | use la_arena::Arena; | 63 | use la_arena::Arena; |
63 | use profile::Count; | 64 | use profile::Count; |
64 | use rustc_hash::FxHashMap; | 65 | use rustc_hash::FxHashMap; |
@@ -254,15 +255,6 @@ impl DefMap { | |||
254 | } | 255 | } |
255 | } | 256 | } |
256 | 257 | ||
257 | pub fn add_diagnostics( | ||
258 | &self, | ||
259 | db: &dyn DefDatabase, | ||
260 | module: LocalModuleId, | ||
261 | sink: &mut DiagnosticSink, | ||
262 | ) { | ||
263 | self.diagnostics.iter().for_each(|it| it.add_to(db, module, sink)) | ||
264 | } | ||
265 | |||
266 | pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ { | 258 | pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ { |
267 | self.modules | 259 | self.modules |
268 | .iter() | 260 | .iter() |
@@ -448,6 +440,11 @@ impl DefMap { | |||
448 | module.scope.shrink_to_fit(); | 440 | module.scope.shrink_to_fit(); |
449 | } | 441 | } |
450 | } | 442 | } |
443 | |||
444 | /// Get a reference to the def map's diagnostics. | ||
445 | pub fn diagnostics(&self) -> &[DefDiagnostic] { | ||
446 | self.diagnostics.as_slice() | ||
447 | } | ||
451 | } | 448 | } |
452 | 449 | ||
453 | impl ModuleData { | 450 | impl ModuleData { |
@@ -471,236 +468,3 @@ pub enum ModuleSource { | |||
471 | Module(ast::Module), | 468 | Module(ast::Module), |
472 | BlockExpr(ast::BlockExpr), | 469 | BlockExpr(ast::BlockExpr), |
473 | } | 470 | } |
474 | |||
475 | mod diagnostics { | ||
476 | use cfg::{CfgExpr, CfgOptions}; | ||
477 | use hir_expand::diagnostics::DiagnosticSink; | ||
478 | use hir_expand::hygiene::Hygiene; | ||
479 | use hir_expand::{InFile, MacroCallKind}; | ||
480 | use syntax::ast::AttrsOwner; | ||
481 | use syntax::{ast, AstNode, AstPtr, SyntaxKind, SyntaxNodePtr}; | ||
482 | |||
483 | use crate::path::ModPath; | ||
484 | use crate::{db::DefDatabase, diagnostics::*, nameres::LocalModuleId, AstId}; | ||
485 | |||
486 | #[derive(Debug, PartialEq, Eq)] | ||
487 | enum DiagnosticKind { | ||
488 | UnresolvedModule { declaration: AstId<ast::Module>, candidate: String }, | ||
489 | |||
490 | UnresolvedExternCrate { ast: AstId<ast::ExternCrate> }, | ||
491 | |||
492 | UnresolvedImport { ast: AstId<ast::Use>, index: usize }, | ||
493 | |||
494 | UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions }, | ||
495 | |||
496 | UnresolvedProcMacro { ast: MacroCallKind }, | ||
497 | |||
498 | UnresolvedMacroCall { ast: AstId<ast::MacroCall>, path: ModPath }, | ||
499 | |||
500 | MacroError { ast: MacroCallKind, message: String }, | ||
501 | } | ||
502 | |||
503 | #[derive(Debug, PartialEq, Eq)] | ||
504 | pub(super) struct DefDiagnostic { | ||
505 | in_module: LocalModuleId, | ||
506 | kind: DiagnosticKind, | ||
507 | } | ||
508 | |||
509 | impl DefDiagnostic { | ||
510 | pub(super) fn unresolved_module( | ||
511 | container: LocalModuleId, | ||
512 | declaration: AstId<ast::Module>, | ||
513 | candidate: String, | ||
514 | ) -> Self { | ||
515 | Self { | ||
516 | in_module: container, | ||
517 | kind: DiagnosticKind::UnresolvedModule { declaration, candidate }, | ||
518 | } | ||
519 | } | ||
520 | |||
521 | pub(super) fn unresolved_extern_crate( | ||
522 | container: LocalModuleId, | ||
523 | declaration: AstId<ast::ExternCrate>, | ||
524 | ) -> Self { | ||
525 | Self { | ||
526 | in_module: container, | ||
527 | kind: DiagnosticKind::UnresolvedExternCrate { ast: declaration }, | ||
528 | } | ||
529 | } | ||
530 | |||
531 | pub(super) fn unresolved_import( | ||
532 | container: LocalModuleId, | ||
533 | ast: AstId<ast::Use>, | ||
534 | index: usize, | ||
535 | ) -> Self { | ||
536 | Self { in_module: container, kind: DiagnosticKind::UnresolvedImport { ast, index } } | ||
537 | } | ||
538 | |||
539 | pub(super) fn unconfigured_code( | ||
540 | container: LocalModuleId, | ||
541 | ast: AstId<ast::Item>, | ||
542 | cfg: CfgExpr, | ||
543 | opts: CfgOptions, | ||
544 | ) -> Self { | ||
545 | Self { in_module: container, kind: DiagnosticKind::UnconfiguredCode { ast, cfg, opts } } | ||
546 | } | ||
547 | |||
548 | pub(super) fn unresolved_proc_macro(container: LocalModuleId, ast: MacroCallKind) -> Self { | ||
549 | Self { in_module: container, kind: DiagnosticKind::UnresolvedProcMacro { ast } } | ||
550 | } | ||
551 | |||
552 | pub(super) fn macro_error( | ||
553 | container: LocalModuleId, | ||
554 | ast: MacroCallKind, | ||
555 | message: String, | ||
556 | ) -> Self { | ||
557 | Self { in_module: container, kind: DiagnosticKind::MacroError { ast, message } } | ||
558 | } | ||
559 | |||
560 | pub(super) fn unresolved_macro_call( | ||
561 | container: LocalModuleId, | ||
562 | ast: AstId<ast::MacroCall>, | ||
563 | path: ModPath, | ||
564 | ) -> Self { | ||
565 | Self { in_module: container, kind: DiagnosticKind::UnresolvedMacroCall { ast, path } } | ||
566 | } | ||
567 | |||
568 | pub(super) fn add_to( | ||
569 | &self, | ||
570 | db: &dyn DefDatabase, | ||
571 | target_module: LocalModuleId, | ||
572 | sink: &mut DiagnosticSink, | ||
573 | ) { | ||
574 | if self.in_module != target_module { | ||
575 | return; | ||
576 | } | ||
577 | |||
578 | match &self.kind { | ||
579 | DiagnosticKind::UnresolvedModule { declaration, candidate } => { | ||
580 | let decl = declaration.to_node(db.upcast()); | ||
581 | sink.push(UnresolvedModule { | ||
582 | file: declaration.file_id, | ||
583 | decl: AstPtr::new(&decl), | ||
584 | candidate: candidate.clone(), | ||
585 | }) | ||
586 | } | ||
587 | |||
588 | DiagnosticKind::UnresolvedExternCrate { ast } => { | ||
589 | let item = ast.to_node(db.upcast()); | ||
590 | sink.push(UnresolvedExternCrate { | ||
591 | file: ast.file_id, | ||
592 | item: AstPtr::new(&item), | ||
593 | }); | ||
594 | } | ||
595 | |||
596 | DiagnosticKind::UnresolvedImport { ast, index } => { | ||
597 | let use_item = ast.to_node(db.upcast()); | ||
598 | let hygiene = Hygiene::new(db.upcast(), ast.file_id); | ||
599 | let mut cur = 0; | ||
600 | let mut tree = None; | ||
601 | ModPath::expand_use_item( | ||
602 | db, | ||
603 | InFile::new(ast.file_id, use_item), | ||
604 | &hygiene, | ||
605 | |_mod_path, use_tree, _is_glob, _alias| { | ||
606 | if cur == *index { | ||
607 | tree = Some(use_tree.clone()); | ||
608 | } | ||
609 | |||
610 | cur += 1; | ||
611 | }, | ||
612 | ); | ||
613 | |||
614 | if let Some(tree) = tree { | ||
615 | sink.push(UnresolvedImport { file: ast.file_id, node: AstPtr::new(&tree) }); | ||
616 | } | ||
617 | } | ||
618 | |||
619 | DiagnosticKind::UnconfiguredCode { ast, cfg, opts } => { | ||
620 | let item = ast.to_node(db.upcast()); | ||
621 | sink.push(InactiveCode { | ||
622 | file: ast.file_id, | ||
623 | node: AstPtr::new(&item).into(), | ||
624 | cfg: cfg.clone(), | ||
625 | opts: opts.clone(), | ||
626 | }); | ||
627 | } | ||
628 | |||
629 | DiagnosticKind::UnresolvedProcMacro { ast } => { | ||
630 | let mut precise_location = None; | ||
631 | let (file, ast, name) = match ast { | ||
632 | MacroCallKind::FnLike { ast_id, .. } => { | ||
633 | let node = ast_id.to_node(db.upcast()); | ||
634 | (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node)), None) | ||
635 | } | ||
636 | MacroCallKind::Derive { ast_id, derive_name, .. } => { | ||
637 | let node = ast_id.to_node(db.upcast()); | ||
638 | |||
639 | // Compute the precise location of the macro name's token in the derive | ||
640 | // list. | ||
641 | // FIXME: This does not handle paths to the macro, but neither does the | ||
642 | // rest of r-a. | ||
643 | let derive_attrs = | ||
644 | node.attrs().filter_map(|attr| match attr.as_simple_call() { | ||
645 | Some((name, args)) if name == "derive" => Some(args), | ||
646 | _ => None, | ||
647 | }); | ||
648 | 'outer: for attr in derive_attrs { | ||
649 | let tokens = | ||
650 | attr.syntax().children_with_tokens().filter_map(|elem| { | ||
651 | match elem { | ||
652 | syntax::NodeOrToken::Node(_) => None, | ||
653 | syntax::NodeOrToken::Token(tok) => Some(tok), | ||
654 | } | ||
655 | }); | ||
656 | for token in tokens { | ||
657 | if token.kind() == SyntaxKind::IDENT | ||
658 | && token.text() == derive_name.as_str() | ||
659 | { | ||
660 | precise_location = Some(token.text_range()); | ||
661 | break 'outer; | ||
662 | } | ||
663 | } | ||
664 | } | ||
665 | |||
666 | ( | ||
667 | ast_id.file_id, | ||
668 | SyntaxNodePtr::from(AstPtr::new(&node)), | ||
669 | Some(derive_name.clone()), | ||
670 | ) | ||
671 | } | ||
672 | }; | ||
673 | sink.push(UnresolvedProcMacro { | ||
674 | file, | ||
675 | node: ast, | ||
676 | precise_location, | ||
677 | macro_name: name, | ||
678 | }); | ||
679 | } | ||
680 | |||
681 | DiagnosticKind::UnresolvedMacroCall { ast, path } => { | ||
682 | let node = ast.to_node(db.upcast()); | ||
683 | sink.push(UnresolvedMacroCall { | ||
684 | file: ast.file_id, | ||
685 | node: AstPtr::new(&node), | ||
686 | path: path.clone(), | ||
687 | }); | ||
688 | } | ||
689 | |||
690 | DiagnosticKind::MacroError { ast, message } => { | ||
691 | let (file, ast) = match ast { | ||
692 | MacroCallKind::FnLike { ast_id, .. } => { | ||
693 | let node = ast_id.to_node(db.upcast()); | ||
694 | (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node))) | ||
695 | } | ||
696 | MacroCallKind::Derive { ast_id, .. } => { | ||
697 | let node = ast_id.to_node(db.upcast()); | ||
698 | (ast_id.file_id, SyntaxNodePtr::from(AstPtr::new(&node))) | ||
699 | } | ||
700 | }; | ||
701 | sink.push(MacroError { file, node: ast, message: message.clone() }); | ||
702 | } | ||
703 | } | ||
704 | } | ||
705 | } | ||
706 | } | ||
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 014ea4de4..d9d6c91a8 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs | |||
@@ -17,6 +17,7 @@ use hir_expand::{ | |||
17 | }; | 17 | }; |
18 | use hir_expand::{InFile, MacroCallLoc}; | 18 | use hir_expand::{InFile, MacroCallLoc}; |
19 | use itertools::Itertools; | 19 | use itertools::Itertools; |
20 | use la_arena::Idx; | ||
20 | use rustc_hash::{FxHashMap, FxHashSet}; | 21 | use rustc_hash::{FxHashMap, FxHashSet}; |
21 | use syntax::ast; | 22 | use syntax::ast; |
22 | 23 | ||
@@ -33,7 +34,10 @@ use crate::{ | |||
33 | }, | 34 | }, |
34 | macro_call_as_call_id, | 35 | macro_call_as_call_id, |
35 | nameres::{ | 36 | nameres::{ |
36 | diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, | 37 | diagnostics::DefDiagnostic, |
38 | mod_resolution::ModDir, | ||
39 | path_resolution::ReachedFixedPoint, | ||
40 | proc_macro::{ProcMacroDef, ProcMacroKind}, | ||
37 | BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode, | 41 | BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode, |
38 | }, | 42 | }, |
39 | path::{ImportAlias, ModPath, PathKind}, | 43 | path::{ImportAlias, ModPath, PathKind}, |
@@ -44,8 +48,6 @@ use crate::{ | |||
44 | UnresolvedMacro, | 48 | UnresolvedMacro, |
45 | }; | 49 | }; |
46 | 50 | ||
47 | use super::proc_macro::{ProcMacroDef, ProcMacroKind}; | ||
48 | |||
49 | const GLOB_RECURSION_LIMIT: usize = 100; | 51 | const GLOB_RECURSION_LIMIT: usize = 100; |
50 | const EXPANSION_DEPTH_LIMIT: usize = 128; | 52 | const EXPANSION_DEPTH_LIMIT: usize = 128; |
51 | const FIXED_POINT_LIMIT: usize = 8192; | 53 | const FIXED_POINT_LIMIT: usize = 8192; |
@@ -142,7 +144,7 @@ impl PartialResolvedImport { | |||
142 | 144 | ||
143 | #[derive(Clone, Debug, Eq, PartialEq)] | 145 | #[derive(Clone, Debug, Eq, PartialEq)] |
144 | enum ImportSource { | 146 | enum ImportSource { |
145 | Import(ItemTreeId<item_tree::Import>), | 147 | Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> }, |
146 | ExternCrate(ItemTreeId<item_tree::ExternCrate>), | 148 | ExternCrate(ItemTreeId<item_tree::ExternCrate>), |
147 | } | 149 | } |
148 | 150 | ||
@@ -164,20 +166,26 @@ impl Import { | |||
164 | krate: CrateId, | 166 | krate: CrateId, |
165 | tree: &ItemTree, | 167 | tree: &ItemTree, |
166 | id: ItemTreeId<item_tree::Import>, | 168 | id: ItemTreeId<item_tree::Import>, |
167 | ) -> Self { | 169 | ) -> Vec<Self> { |
168 | let it = &tree[id.value]; | 170 | let it = &tree[id.value]; |
169 | let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into()); | 171 | let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into()); |
170 | let visibility = &tree[it.visibility]; | 172 | let visibility = &tree[it.visibility]; |
171 | Self { | 173 | let is_prelude = attrs.by_key("prelude_import").exists(); |
172 | path: it.path.clone(), | 174 | |
173 | alias: it.alias.clone(), | 175 | let mut res = Vec::new(); |
174 | visibility: visibility.clone(), | 176 | it.use_tree.expand(|idx, path, is_glob, alias| { |
175 | is_glob: it.is_glob, | 177 | res.push(Self { |
176 | is_prelude: attrs.by_key("prelude_import").exists(), | 178 | path: Interned::new(path), // FIXME this makes little sense |
177 | is_extern_crate: false, | 179 | alias, |
178 | is_macro_use: false, | 180 | visibility: visibility.clone(), |
179 | source: ImportSource::Import(id), | 181 | is_glob, |
180 | } | 182 | is_prelude, |
183 | is_extern_crate: false, | ||
184 | is_macro_use: false, | ||
185 | source: ImportSource::Import { id, use_tree: idx }, | ||
186 | }); | ||
187 | }); | ||
188 | res | ||
181 | } | 189 | } |
182 | 190 | ||
183 | fn from_extern_crate( | 191 | fn from_extern_crate( |
@@ -285,7 +293,7 @@ impl DefCollector<'_> { | |||
285 | let registered_name = if *attr_name == hir_expand::name![register_attr] | 293 | let registered_name = if *attr_name == hir_expand::name![register_attr] |
286 | || *attr_name == hir_expand::name![register_tool] | 294 | || *attr_name == hir_expand::name![register_tool] |
287 | { | 295 | { |
288 | match &attr.input { | 296 | match attr.input.as_deref() { |
289 | Some(AttrInput::TokenTree(subtree)) => match &*subtree.token_trees { | 297 | Some(AttrInput::TokenTree(subtree)) => match &*subtree.token_trees { |
290 | [tt::TokenTree::Leaf(tt::Leaf::Ident(name))] => name.as_name(), | 298 | [tt::TokenTree::Leaf(tt::Leaf::Ident(name))] => name.as_name(), |
291 | _ => continue, | 299 | _ => continue, |
@@ -469,16 +477,21 @@ impl DefCollector<'_> { | |||
469 | /// going out of sync with what the build system sees (since we resolve using VFS state, but | 477 | /// going out of sync with what the build system sees (since we resolve using VFS state, but |
470 | /// Cargo builds only on-disk files). We could and probably should add diagnostics for that. | 478 | /// Cargo builds only on-disk files). We could and probably should add diagnostics for that. |
471 | fn export_proc_macro(&mut self, def: ProcMacroDef, ast_id: AstId<ast::Fn>) { | 479 | fn export_proc_macro(&mut self, def: ProcMacroDef, ast_id: AstId<ast::Fn>) { |
480 | let kind = def.kind.to_basedb_kind(); | ||
472 | self.exports_proc_macros = true; | 481 | self.exports_proc_macros = true; |
473 | let macro_def = match self.proc_macros.iter().find(|(n, _)| n == &def.name) { | 482 | let macro_def = match self.proc_macros.iter().find(|(n, _)| n == &def.name) { |
474 | Some((_, expander)) => MacroDefId { | 483 | Some((_, expander)) => MacroDefId { |
475 | krate: self.def_map.krate, | 484 | krate: self.def_map.krate, |
476 | kind: MacroDefKind::ProcMacro(*expander, ast_id), | 485 | kind: MacroDefKind::ProcMacro(*expander, kind, ast_id), |
477 | local_inner: false, | 486 | local_inner: false, |
478 | }, | 487 | }, |
479 | None => MacroDefId { | 488 | None => MacroDefId { |
480 | krate: self.def_map.krate, | 489 | krate: self.def_map.krate, |
481 | kind: MacroDefKind::ProcMacro(ProcMacroExpander::dummy(self.def_map.krate), ast_id), | 490 | kind: MacroDefKind::ProcMacro( |
491 | ProcMacroExpander::dummy(self.def_map.krate), | ||
492 | kind, | ||
493 | ast_id, | ||
494 | ), | ||
482 | local_inner: false, | 495 | local_inner: false, |
483 | }, | 496 | }, |
484 | }; | 497 | }; |
@@ -1129,11 +1142,8 @@ impl DefCollector<'_> { | |||
1129 | } | 1142 | } |
1130 | 1143 | ||
1131 | for directive in &self.unresolved_imports { | 1144 | for directive in &self.unresolved_imports { |
1132 | if let ImportSource::Import(import) = &directive.import.source { | 1145 | if let ImportSource::Import { id: import, use_tree } = &directive.import.source { |
1133 | let item_tree = import.item_tree(self.db); | 1146 | match (directive.import.path.segments().first(), &directive.import.path.kind) { |
1134 | let import_data = &item_tree[import.value]; | ||
1135 | |||
1136 | match (import_data.path.segments().first(), &import_data.path.kind) { | ||
1137 | (Some(krate), PathKind::Plain) | (Some(krate), PathKind::Abs) => { | 1147 | (Some(krate), PathKind::Plain) | (Some(krate), PathKind::Abs) => { |
1138 | if diagnosed_extern_crates.contains(krate) { | 1148 | if diagnosed_extern_crates.contains(krate) { |
1139 | continue; | 1149 | continue; |
@@ -1144,8 +1154,8 @@ impl DefCollector<'_> { | |||
1144 | 1154 | ||
1145 | self.def_map.diagnostics.push(DefDiagnostic::unresolved_import( | 1155 | self.def_map.diagnostics.push(DefDiagnostic::unresolved_import( |
1146 | directive.module_id, | 1156 | directive.module_id, |
1147 | InFile::new(import.file_id(), import_data.ast_id), | 1157 | *import, |
1148 | import_data.index, | 1158 | *use_tree, |
1149 | )); | 1159 | )); |
1150 | } | 1160 | } |
1151 | } | 1161 | } |
@@ -1221,16 +1231,20 @@ impl ModCollector<'_, '_> { | |||
1221 | match item { | 1231 | match item { |
1222 | ModItem::Mod(m) => self.collect_module(&self.item_tree[m], &attrs), | 1232 | ModItem::Mod(m) => self.collect_module(&self.item_tree[m], &attrs), |
1223 | ModItem::Import(import_id) => { | 1233 | ModItem::Import(import_id) => { |
1224 | self.def_collector.unresolved_imports.push(ImportDirective { | 1234 | let module_id = self.module_id; |
1225 | module_id: self.module_id, | 1235 | let imports = Import::from_use( |
1226 | import: Import::from_use( | 1236 | self.def_collector.db, |
1227 | self.def_collector.db, | 1237 | krate, |
1228 | krate, | 1238 | &self.item_tree, |
1229 | &self.item_tree, | 1239 | ItemTreeId::new(self.file_id, import_id), |
1230 | ItemTreeId::new(self.file_id, import_id), | 1240 | ); |
1231 | ), | 1241 | self.def_collector.unresolved_imports.extend(imports.into_iter().map( |
1232 | status: PartialResolvedImport::Unresolved, | 1242 | |import| ImportDirective { |
1233 | }) | 1243 | module_id, |
1244 | import, | ||
1245 | status: PartialResolvedImport::Unresolved, | ||
1246 | }, | ||
1247 | )); | ||
1234 | } | 1248 | } |
1235 | ModItem::ExternCrate(import_id) => { | 1249 | ModItem::ExternCrate(import_id) => { |
1236 | self.def_collector.unresolved_imports.push(ImportDirective { | 1250 | self.def_collector.unresolved_imports.push(ImportDirective { |
diff --git a/crates/hir_def/src/nameres/diagnostics.rs b/crates/hir_def/src/nameres/diagnostics.rs new file mode 100644 index 000000000..57c36c3c6 --- /dev/null +++ b/crates/hir_def/src/nameres/diagnostics.rs | |||
@@ -0,0 +1,96 @@ | |||
1 | //! Diagnostics emitted during DefMap construction. | ||
2 | |||
3 | use cfg::{CfgExpr, CfgOptions}; | ||
4 | use hir_expand::MacroCallKind; | ||
5 | use la_arena::Idx; | ||
6 | use syntax::ast; | ||
7 | |||
8 | use crate::{ | ||
9 | item_tree::{self, ItemTreeId}, | ||
10 | nameres::LocalModuleId, | ||
11 | path::ModPath, | ||
12 | AstId, | ||
13 | }; | ||
14 | |||
15 | #[derive(Debug, PartialEq, Eq)] | ||
16 | pub enum DefDiagnosticKind { | ||
17 | UnresolvedModule { ast: AstId<ast::Module>, candidate: String }, | ||
18 | |||
19 | UnresolvedExternCrate { ast: AstId<ast::ExternCrate> }, | ||
20 | |||
21 | UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> }, | ||
22 | |||
23 | UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions }, | ||
24 | |||
25 | UnresolvedProcMacro { ast: MacroCallKind }, | ||
26 | |||
27 | UnresolvedMacroCall { ast: AstId<ast::MacroCall>, path: ModPath }, | ||
28 | |||
29 | MacroError { ast: MacroCallKind, message: String }, | ||
30 | } | ||
31 | |||
32 | #[derive(Debug, PartialEq, Eq)] | ||
33 | pub struct DefDiagnostic { | ||
34 | pub in_module: LocalModuleId, | ||
35 | pub kind: DefDiagnosticKind, | ||
36 | } | ||
37 | |||
38 | impl DefDiagnostic { | ||
39 | pub(super) fn unresolved_module( | ||
40 | container: LocalModuleId, | ||
41 | declaration: AstId<ast::Module>, | ||
42 | candidate: String, | ||
43 | ) -> Self { | ||
44 | Self { | ||
45 | in_module: container, | ||
46 | kind: DefDiagnosticKind::UnresolvedModule { ast: declaration, candidate }, | ||
47 | } | ||
48 | } | ||
49 | |||
50 | pub(super) fn unresolved_extern_crate( | ||
51 | container: LocalModuleId, | ||
52 | declaration: AstId<ast::ExternCrate>, | ||
53 | ) -> Self { | ||
54 | Self { | ||
55 | in_module: container, | ||
56 | kind: DefDiagnosticKind::UnresolvedExternCrate { ast: declaration }, | ||
57 | } | ||
58 | } | ||
59 | |||
60 | pub(super) fn unresolved_import( | ||
61 | container: LocalModuleId, | ||
62 | id: ItemTreeId<item_tree::Import>, | ||
63 | index: Idx<ast::UseTree>, | ||
64 | ) -> Self { | ||
65 | Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } } | ||
66 | } | ||
67 | |||
68 | pub(super) fn unconfigured_code( | ||
69 | container: LocalModuleId, | ||
70 | ast: AstId<ast::Item>, | ||
71 | cfg: CfgExpr, | ||
72 | opts: CfgOptions, | ||
73 | ) -> Self { | ||
74 | Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } } | ||
75 | } | ||
76 | |||
77 | pub(super) fn unresolved_proc_macro(container: LocalModuleId, ast: MacroCallKind) -> Self { | ||
78 | Self { in_module: container, kind: DefDiagnosticKind::UnresolvedProcMacro { ast } } | ||
79 | } | ||
80 | |||
81 | pub(super) fn macro_error( | ||
82 | container: LocalModuleId, | ||
83 | ast: MacroCallKind, | ||
84 | message: String, | ||
85 | ) -> Self { | ||
86 | Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, message } } | ||
87 | } | ||
88 | |||
89 | pub(super) fn unresolved_macro_call( | ||
90 | container: LocalModuleId, | ||
91 | ast: AstId<ast::MacroCall>, | ||
92 | path: ModPath, | ||
93 | ) -> Self { | ||
94 | Self { in_module: container, kind: DefDiagnosticKind::UnresolvedMacroCall { ast, path } } | ||
95 | } | ||
96 | } | ||
diff --git a/crates/hir_def/src/nameres/proc_macro.rs b/crates/hir_def/src/nameres/proc_macro.rs index 156598f19..3f095d623 100644 --- a/crates/hir_def/src/nameres/proc_macro.rs +++ b/crates/hir_def/src/nameres/proc_macro.rs | |||
@@ -18,6 +18,16 @@ pub(super) enum ProcMacroKind { | |||
18 | Attr, | 18 | Attr, |
19 | } | 19 | } |
20 | 20 | ||
21 | impl ProcMacroKind { | ||
22 | pub(super) fn to_basedb_kind(&self) -> base_db::ProcMacroKind { | ||
23 | match self { | ||
24 | ProcMacroKind::CustomDerive { .. } => base_db::ProcMacroKind::CustomDerive, | ||
25 | ProcMacroKind::FnLike => base_db::ProcMacroKind::FuncLike, | ||
26 | ProcMacroKind::Attr => base_db::ProcMacroKind::Attr, | ||
27 | } | ||
28 | } | ||
29 | } | ||
30 | |||
21 | impl Attrs { | 31 | impl Attrs { |
22 | #[rustfmt::skip] | 32 | #[rustfmt::skip] |
23 | pub(super) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> { | 33 | pub(super) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> { |
diff --git a/crates/hir_def/src/nameres/tests/diagnostics.rs b/crates/hir_def/src/nameres/tests/diagnostics.rs index 75147d973..ec6670952 100644 --- a/crates/hir_def/src/nameres/tests/diagnostics.rs +++ b/crates/hir_def/src/nameres/tests/diagnostics.rs | |||
@@ -18,7 +18,7 @@ fn unresolved_import() { | |||
18 | r" | 18 | r" |
19 | use does_exist; | 19 | use does_exist; |
20 | use does_not_exist; | 20 | use does_not_exist; |
21 | //^^^^^^^^^^^^^^ unresolved import | 21 | //^^^^^^^^^^^^^^^^^^^ UnresolvedImport |
22 | 22 | ||
23 | mod does_exist {} | 23 | mod does_exist {} |
24 | ", | 24 | ", |
@@ -26,40 +26,13 @@ fn unresolved_import() { | |||
26 | } | 26 | } |
27 | 27 | ||
28 | #[test] | 28 | #[test] |
29 | fn unresolved_import_in_use_tree() { | ||
30 | // Only the relevant part of a nested `use` item should be highlighted. | ||
31 | check_diagnostics( | ||
32 | r" | ||
33 | use does_exist::{Exists, DoesntExist}; | ||
34 | //^^^^^^^^^^^ unresolved import | ||
35 | |||
36 | use {does_not_exist::*, does_exist}; | ||
37 | //^^^^^^^^^^^^^^^^^ unresolved import | ||
38 | |||
39 | use does_not_exist::{ | ||
40 | a, | ||
41 | //^ unresolved import | ||
42 | b, | ||
43 | //^ unresolved import | ||
44 | c, | ||
45 | //^ unresolved import | ||
46 | }; | ||
47 | |||
48 | mod does_exist { | ||
49 | pub struct Exists; | ||
50 | } | ||
51 | ", | ||
52 | ); | ||
53 | } | ||
54 | |||
55 | #[test] | ||
56 | fn unresolved_extern_crate() { | 29 | fn unresolved_extern_crate() { |
57 | check_diagnostics( | 30 | check_diagnostics( |
58 | r" | 31 | r" |
59 | //- /main.rs crate:main deps:core | 32 | //- /main.rs crate:main deps:core |
60 | extern crate core; | 33 | extern crate core; |
61 | extern crate doesnotexist; | 34 | extern crate doesnotexist; |
62 | //^^^^^^^^^^^^^^^^^^^^^^^^^^ unresolved extern crate | 35 | //^^^^^^^^^^^^^^^^^^^^^^^^^^ UnresolvedExternCrate |
63 | //- /lib.rs crate:core | 36 | //- /lib.rs crate:core |
64 | ", | 37 | ", |
65 | ); | 38 | ); |
@@ -72,7 +45,7 @@ fn extern_crate_self_as() { | |||
72 | r" | 45 | r" |
73 | //- /lib.rs | 46 | //- /lib.rs |
74 | extern crate doesnotexist; | 47 | extern crate doesnotexist; |
75 | //^^^^^^^^^^^^^^^^^^^^^^^^^^ unresolved extern crate | 48 | //^^^^^^^^^^^^^^^^^^^^^^^^^^ UnresolvedExternCrate |
76 | // Should not error. | 49 | // Should not error. |
77 | extern crate self as foo; | 50 | extern crate self as foo; |
78 | struct Foo; | 51 | struct Foo; |
@@ -88,18 +61,18 @@ fn dedup_unresolved_import_from_unresolved_crate() { | |||
88 | //- /main.rs crate:main | 61 | //- /main.rs crate:main |
89 | mod a { | 62 | mod a { |
90 | extern crate doesnotexist; | 63 | extern crate doesnotexist; |
91 | //^^^^^^^^^^^^^^^^^^^^^^^^^^ unresolved extern crate | 64 | //^^^^^^^^^^^^^^^^^^^^^^^^^^ UnresolvedExternCrate |
92 | 65 | ||
93 | // Should not error, since we already errored for the missing crate. | 66 | // Should not error, since we already errored for the missing crate. |
94 | use doesnotexist::{self, bla, *}; | 67 | use doesnotexist::{self, bla, *}; |
95 | 68 | ||
96 | use crate::doesnotexist; | 69 | use crate::doesnotexist; |
97 | //^^^^^^^^^^^^^^^^^^^ unresolved import | 70 | //^^^^^^^^^^^^^^^^^^^^^^^^ UnresolvedImport |
98 | } | 71 | } |
99 | 72 | ||
100 | mod m { | 73 | mod m { |
101 | use super::doesnotexist; | 74 | use super::doesnotexist; |
102 | //^^^^^^^^^^^^^^^^^^^ unresolved import | 75 | //^^^^^^^^^^^^^^^^^^^^^^^^ UnresolvedImport |
103 | } | 76 | } |
104 | ", | 77 | ", |
105 | ); | 78 | ); |
@@ -112,7 +85,7 @@ fn unresolved_module() { | |||
112 | //- /lib.rs | 85 | //- /lib.rs |
113 | mod foo; | 86 | mod foo; |
114 | mod bar; | 87 | mod bar; |
115 | //^^^^^^^^ unresolved module | 88 | //^^^^^^^^ UnresolvedModule |
116 | mod baz {} | 89 | mod baz {} |
117 | //- /foo.rs | 90 | //- /foo.rs |
118 | ", | 91 | ", |
@@ -127,16 +100,16 @@ fn inactive_item() { | |||
127 | r#" | 100 | r#" |
128 | //- /lib.rs | 101 | //- /lib.rs |
129 | #[cfg(no)] pub fn f() {} | 102 | #[cfg(no)] pub fn f() {} |
130 | //^^^^^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: no is disabled | 103 | //^^^^^^^^^^^^^^^^^^^^^^^^ UnconfiguredCode |
131 | 104 | ||
132 | #[cfg(no)] #[cfg(no2)] mod m; | 105 | #[cfg(no)] #[cfg(no2)] mod m; |
133 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: no and no2 are disabled | 106 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UnconfiguredCode |
134 | 107 | ||
135 | #[cfg(all(not(a), b))] enum E {} | 108 | #[cfg(all(not(a), b))] enum E {} |
136 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: b is disabled | 109 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UnconfiguredCode |
137 | 110 | ||
138 | #[cfg(feature = "std")] use std; | 111 | #[cfg(feature = "std")] use std; |
139 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: feature = "std" is disabled | 112 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UnconfiguredCode |
140 | "#, | 113 | "#, |
141 | ); | 114 | ); |
142 | } | 115 | } |
@@ -149,14 +122,14 @@ fn inactive_via_cfg_attr() { | |||
149 | r#" | 122 | r#" |
150 | //- /lib.rs | 123 | //- /lib.rs |
151 | #[cfg_attr(not(never), cfg(no))] fn f() {} | 124 | #[cfg_attr(not(never), cfg(no))] fn f() {} |
152 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: no is disabled | 125 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UnconfiguredCode |
153 | 126 | ||
154 | #[cfg_attr(not(never), cfg(not(no)))] fn f() {} | 127 | #[cfg_attr(not(never), cfg(not(no)))] fn f() {} |
155 | 128 | ||
156 | #[cfg_attr(never, cfg(no))] fn g() {} | 129 | #[cfg_attr(never, cfg(no))] fn g() {} |
157 | 130 | ||
158 | #[cfg_attr(not(never), inline, cfg(no))] fn h() {} | 131 | #[cfg_attr(not(never), inline, cfg(no))] fn h() {} |
159 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ code is inactive due to #[cfg] directives: no is disabled | 132 | //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UnconfiguredCode |
160 | "#, | 133 | "#, |
161 | ); | 134 | ); |
162 | } | 135 | } |
@@ -170,7 +143,7 @@ fn unresolved_legacy_scope_macro() { | |||
170 | 143 | ||
171 | m!(); | 144 | m!(); |
172 | m2!(); | 145 | m2!(); |
173 | //^^^^^^ unresolved macro `self::m2!` | 146 | //^^^^^^ UnresolvedMacroCall |
174 | "#, | 147 | "#, |
175 | ); | 148 | ); |
176 | } | 149 | } |
@@ -187,7 +160,7 @@ fn unresolved_module_scope_macro() { | |||
187 | 160 | ||
188 | self::m!(); | 161 | self::m!(); |
189 | self::m2!(); | 162 | self::m2!(); |
190 | //^^^^^^^^^^^^ unresolved macro `self::m2!` | 163 | //^^^^^^^^^^^^ UnresolvedMacroCall |
191 | "#, | 164 | "#, |
192 | ); | 165 | ); |
193 | } | 166 | } |
diff --git a/crates/hir_def/src/nameres/tests/incremental.rs b/crates/hir_def/src/nameres/tests/incremental.rs index d884a6eb4..7bf152e26 100644 --- a/crates/hir_def/src/nameres/tests/incremental.rs +++ b/crates/hir_def/src/nameres/tests/incremental.rs | |||
@@ -1,6 +1,8 @@ | |||
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | 2 | ||
3 | use base_db::SourceDatabaseExt; | 3 | use base_db::{salsa::SweepStrategy, SourceDatabaseExt}; |
4 | |||
5 | use crate::{AdtId, ModuleDefId}; | ||
4 | 6 | ||
5 | use super::*; | 7 | use super::*; |
6 | 8 | ||
@@ -163,3 +165,73 @@ m!(Z); | |||
163 | assert_eq!(n_reparsed_macros, 0); | 165 | assert_eq!(n_reparsed_macros, 0); |
164 | } | 166 | } |
165 | } | 167 | } |
168 | |||
169 | #[test] | ||
170 | fn item_tree_prevents_reparsing() { | ||
171 | // The `ItemTree` is used by both name resolution and the various queries in `adt.rs` and | ||
172 | // `data.rs`. After computing the `ItemTree` and deleting the parse tree, we should be able to | ||
173 | // run those other queries without triggering a reparse. | ||
174 | |||
175 | let (db, pos) = TestDB::with_position( | ||
176 | r#" | ||
177 | pub struct S; | ||
178 | pub union U {} | ||
179 | pub enum E { | ||
180 | Variant, | ||
181 | } | ||
182 | pub fn f(_: S) { $0 } | ||
183 | pub trait Tr {} | ||
184 | impl Tr for () {} | ||
185 | pub const C: u8 = 0; | ||
186 | pub static ST: u8 = 0; | ||
187 | pub type Ty = (); | ||
188 | "#, | ||
189 | ); | ||
190 | let krate = db.test_crate(); | ||
191 | { | ||
192 | let events = db.log_executed(|| { | ||
193 | db.file_item_tree(pos.file_id.into()); | ||
194 | }); | ||
195 | let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count(); | ||
196 | assert_eq!(n_calculated_item_trees, 1); | ||
197 | let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count(); | ||
198 | assert_eq!(n_parsed_files, 1); | ||
199 | } | ||
200 | |||
201 | // Delete the parse tree. | ||
202 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | ||
203 | base_db::ParseQuery.in_db(&db).sweep(sweep); | ||
204 | |||
205 | { | ||
206 | let events = db.log_executed(|| { | ||
207 | let crate_def_map = db.crate_def_map(krate); | ||
208 | let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); | ||
209 | assert_eq!(module_data.scope.resolutions().count(), 8); | ||
210 | assert_eq!(module_data.scope.impls().count(), 1); | ||
211 | |||
212 | for imp in module_data.scope.impls() { | ||
213 | db.impl_data(imp); | ||
214 | } | ||
215 | |||
216 | for (_, res) in module_data.scope.resolutions() { | ||
217 | match res.values.or(res.types).unwrap().0 { | ||
218 | ModuleDefId::FunctionId(f) => drop(db.function_data(f)), | ||
219 | ModuleDefId::AdtId(adt) => match adt { | ||
220 | AdtId::StructId(it) => drop(db.struct_data(it)), | ||
221 | AdtId::UnionId(it) => drop(db.union_data(it)), | ||
222 | AdtId::EnumId(it) => drop(db.enum_data(it)), | ||
223 | }, | ||
224 | ModuleDefId::ConstId(it) => drop(db.const_data(it)), | ||
225 | ModuleDefId::StaticId(it) => drop(db.static_data(it)), | ||
226 | ModuleDefId::TraitId(it) => drop(db.trait_data(it)), | ||
227 | ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)), | ||
228 | ModuleDefId::EnumVariantId(_) | ||
229 | | ModuleDefId::ModuleId(_) | ||
230 | | ModuleDefId::BuiltinType(_) => unreachable!(), | ||
231 | } | ||
232 | } | ||
233 | }); | ||
234 | let n_reparsed_files = events.iter().filter(|it| it.contains("parse(")).count(); | ||
235 | assert_eq!(n_reparsed_files, 0); | ||
236 | } | ||
237 | } | ||
diff --git a/crates/hir_def/src/path.rs b/crates/hir_def/src/path.rs index 9b8873fd2..16440041d 100644 --- a/crates/hir_def/src/path.rs +++ b/crates/hir_def/src/path.rs | |||
@@ -4,7 +4,6 @@ mod lower; | |||
4 | use std::{ | 4 | use std::{ |
5 | fmt::{self, Display}, | 5 | fmt::{self, Display}, |
6 | iter, | 6 | iter, |
7 | sync::Arc, | ||
8 | }; | 7 | }; |
9 | 8 | ||
10 | use crate::{body::LowerCtx, db::DefDatabase, intern::Interned, type_ref::LifetimeRef}; | 9 | use crate::{body::LowerCtx, db::DefDatabase, intern::Interned, type_ref::LifetimeRef}; |
@@ -15,10 +14,7 @@ use hir_expand::{ | |||
15 | }; | 14 | }; |
16 | use syntax::ast; | 15 | use syntax::ast; |
17 | 16 | ||
18 | use crate::{ | 17 | use crate::type_ref::{TypeBound, TypeRef}; |
19 | type_ref::{TypeBound, TypeRef}, | ||
20 | InFile, | ||
21 | }; | ||
22 | 18 | ||
23 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] | 19 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] |
24 | pub struct ModPath { | 20 | pub struct ModPath { |
@@ -57,8 +53,7 @@ impl Display for ImportAlias { | |||
57 | 53 | ||
58 | impl ModPath { | 54 | impl ModPath { |
59 | pub fn from_src(db: &dyn DefDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { | 55 | pub fn from_src(db: &dyn DefDatabase, path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { |
60 | let ctx = LowerCtx::with_hygiene(db, hygiene); | 56 | lower::convert_path(db, None, path, hygiene) |
61 | lower::lower_path(path, &ctx).map(|it| (*it.mod_path).clone()) | ||
62 | } | 57 | } |
63 | 58 | ||
64 | pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath { | 59 | pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath { |
@@ -71,18 +66,6 @@ impl ModPath { | |||
71 | ModPath { kind, segments: Vec::new() } | 66 | ModPath { kind, segments: Vec::new() } |
72 | } | 67 | } |
73 | 68 | ||
74 | /// Calls `cb` with all paths, represented by this use item. | ||
75 | pub(crate) fn expand_use_item( | ||
76 | db: &dyn DefDatabase, | ||
77 | item_src: InFile<ast::Use>, | ||
78 | hygiene: &Hygiene, | ||
79 | mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<ImportAlias>), | ||
80 | ) { | ||
81 | if let Some(tree) = item_src.value.use_tree() { | ||
82 | lower::lower_use_tree(db, None, tree, hygiene, &mut cb); | ||
83 | } | ||
84 | } | ||
85 | |||
86 | pub fn segments(&self) -> &[Name] { | 69 | pub fn segments(&self) -> &[Name] { |
87 | &self.segments | 70 | &self.segments |
88 | } | 71 | } |
@@ -136,7 +119,7 @@ pub struct Path { | |||
136 | type_anchor: Option<Interned<TypeRef>>, | 119 | type_anchor: Option<Interned<TypeRef>>, |
137 | mod_path: Interned<ModPath>, | 120 | mod_path: Interned<ModPath>, |
138 | /// Invariant: the same len as `self.mod_path.segments` | 121 | /// Invariant: the same len as `self.mod_path.segments` |
139 | generic_args: Vec<Option<Arc<GenericArgs>>>, | 122 | generic_args: Vec<Option<Interned<GenericArgs>>>, |
140 | } | 123 | } |
141 | 124 | ||
142 | /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This | 125 | /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This |
@@ -165,7 +148,7 @@ pub struct AssociatedTypeBinding { | |||
165 | /// Bounds for the associated type, like in `Iterator<Item: | 148 | /// Bounds for the associated type, like in `Iterator<Item: |
166 | /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds` | 149 | /// SomeOtherTrait>`. (This is the unstable `associated_type_bounds` |
167 | /// feature.) | 150 | /// feature.) |
168 | pub bounds: Vec<TypeBound>, | 151 | pub bounds: Vec<Interned<TypeBound>>, |
169 | } | 152 | } |
170 | 153 | ||
171 | /// A single generic argument. | 154 | /// A single generic argument. |
@@ -185,7 +168,7 @@ impl Path { | |||
185 | /// Converts a known mod path to `Path`. | 168 | /// Converts a known mod path to `Path`. |
186 | pub(crate) fn from_known_path( | 169 | pub(crate) fn from_known_path( |
187 | path: ModPath, | 170 | path: ModPath, |
188 | generic_args: Vec<Option<Arc<GenericArgs>>>, | 171 | generic_args: Vec<Option<Interned<GenericArgs>>>, |
189 | ) -> Path { | 172 | ) -> Path { |
190 | Path { type_anchor: None, mod_path: Interned::new(path), generic_args } | 173 | Path { type_anchor: None, mod_path: Interned::new(path), generic_args } |
191 | } | 174 | } |
@@ -239,7 +222,7 @@ pub struct PathSegment<'a> { | |||
239 | 222 | ||
240 | pub struct PathSegments<'a> { | 223 | pub struct PathSegments<'a> { |
241 | segments: &'a [Name], | 224 | segments: &'a [Name], |
242 | generic_args: &'a [Option<Arc<GenericArgs>>], | 225 | generic_args: &'a [Option<Interned<GenericArgs>>], |
243 | } | 226 | } |
244 | 227 | ||
245 | impl<'a> PathSegments<'a> { | 228 | impl<'a> PathSegments<'a> { |
diff --git a/crates/hir_def/src/path/lower.rs b/crates/hir_def/src/path/lower.rs index a873325b2..f6220aa92 100644 --- a/crates/hir_def/src/path/lower.rs +++ b/crates/hir_def/src/path/lower.rs | |||
@@ -3,7 +3,6 @@ | |||
3 | mod lower_use; | 3 | mod lower_use; |
4 | 4 | ||
5 | use crate::intern::Interned; | 5 | use crate::intern::Interned; |
6 | use std::sync::Arc; | ||
7 | 6 | ||
8 | use either::Either; | 7 | use either::Either; |
9 | use hir_expand::name::{name, AsName}; | 8 | use hir_expand::name::{name, AsName}; |
@@ -16,7 +15,7 @@ use crate::{ | |||
16 | type_ref::{LifetimeRef, TypeBound, TypeRef}, | 15 | type_ref::{LifetimeRef, TypeBound, TypeRef}, |
17 | }; | 16 | }; |
18 | 17 | ||
19 | pub(super) use lower_use::lower_use_tree; | 18 | pub(super) use lower_use::convert_path; |
20 | 19 | ||
21 | /// Converts an `ast::Path` to `Path`. Works with use trees. | 20 | /// Converts an `ast::Path` to `Path`. Works with use trees. |
22 | /// It correctly handles `$crate` based path from macro call. | 21 | /// It correctly handles `$crate` based path from macro call. |
@@ -48,7 +47,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx) -> Option<Path> { | |||
48 | segment.ret_type(), | 47 | segment.ret_type(), |
49 | ) | 48 | ) |
50 | }) | 49 | }) |
51 | .map(Arc::new); | 50 | .map(Interned::new); |
52 | segments.push(name); | 51 | segments.push(name); |
53 | generic_args.push(args) | 52 | generic_args.push(args) |
54 | } | 53 | } |
@@ -87,13 +86,13 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx) -> Option<Path> { | |||
87 | // Insert the type reference (T in the above example) as Self parameter for the trait | 86 | // Insert the type reference (T in the above example) as Self parameter for the trait |
88 | let last_segment = | 87 | let last_segment = |
89 | generic_args.iter_mut().rev().nth(num_segments.saturating_sub(1))?; | 88 | generic_args.iter_mut().rev().nth(num_segments.saturating_sub(1))?; |
90 | if last_segment.is_none() { | 89 | let mut args_inner = match last_segment { |
91 | *last_segment = Some(Arc::new(GenericArgs::empty())); | 90 | Some(it) => it.as_ref().clone(), |
91 | None => GenericArgs::empty(), | ||
92 | }; | 92 | }; |
93 | let args = last_segment.as_mut().unwrap(); | ||
94 | let mut args_inner = Arc::make_mut(args); | ||
95 | args_inner.has_self_type = true; | 93 | args_inner.has_self_type = true; |
96 | args_inner.args.insert(0, GenericArg::Type(self_type)); | 94 | args_inner.args.insert(0, GenericArg::Type(self_type)); |
95 | *last_segment = Some(Interned::new(args_inner)); | ||
97 | } | 96 | } |
98 | } | 97 | } |
99 | } | 98 | } |
@@ -171,7 +170,9 @@ pub(super) fn lower_generic_args( | |||
171 | let name = name_ref.as_name(); | 170 | let name = name_ref.as_name(); |
172 | let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it)); | 171 | let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it)); |
173 | let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { | 172 | let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { |
174 | l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() | 173 | l.bounds() |
174 | .map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))) | ||
175 | .collect() | ||
175 | } else { | 176 | } else { |
176 | Vec::new() | 177 | Vec::new() |
177 | }; | 178 | }; |
@@ -204,15 +205,14 @@ fn lower_generic_args_from_fn_path( | |||
204 | ) -> Option<GenericArgs> { | 205 | ) -> Option<GenericArgs> { |
205 | let mut args = Vec::new(); | 206 | let mut args = Vec::new(); |
206 | let mut bindings = Vec::new(); | 207 | let mut bindings = Vec::new(); |
207 | if let Some(params) = params { | 208 | let params = params?; |
208 | let mut param_types = Vec::new(); | 209 | let mut param_types = Vec::new(); |
209 | for param in params.params() { | 210 | for param in params.params() { |
210 | let type_ref = TypeRef::from_ast_opt(&ctx, param.ty()); | 211 | let type_ref = TypeRef::from_ast_opt(&ctx, param.ty()); |
211 | param_types.push(type_ref); | 212 | param_types.push(type_ref); |
212 | } | ||
213 | let arg = GenericArg::Type(TypeRef::Tuple(param_types)); | ||
214 | args.push(arg); | ||
215 | } | 213 | } |
214 | let arg = GenericArg::Type(TypeRef::Tuple(param_types)); | ||
215 | args.push(arg); | ||
216 | if let Some(ret_type) = ret_type { | 216 | if let Some(ret_type) = ret_type { |
217 | let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty()); | 217 | let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty()); |
218 | bindings.push(AssociatedTypeBinding { | 218 | bindings.push(AssociatedTypeBinding { |
@@ -220,10 +220,14 @@ fn lower_generic_args_from_fn_path( | |||
220 | type_ref: Some(type_ref), | 220 | type_ref: Some(type_ref), |
221 | bounds: Vec::new(), | 221 | bounds: Vec::new(), |
222 | }); | 222 | }); |
223 | } | ||
224 | if args.is_empty() && bindings.is_empty() { | ||
225 | None | ||
226 | } else { | 223 | } else { |
227 | Some(GenericArgs { args, has_self_type: false, bindings }) | 224 | // -> () |
225 | let type_ref = TypeRef::Tuple(Vec::new()); | ||
226 | bindings.push(AssociatedTypeBinding { | ||
227 | name: name![Output], | ||
228 | type_ref: Some(type_ref), | ||
229 | bounds: Vec::new(), | ||
230 | }); | ||
228 | } | 231 | } |
232 | Some(GenericArgs { args, has_self_type: false, bindings }) | ||
229 | } | 233 | } |
diff --git a/crates/hir_def/src/path/lower/lower_use.rs b/crates/hir_def/src/path/lower/lower_use.rs index ee80e3df3..0ee406f63 100644 --- a/crates/hir_def/src/path/lower/lower_use.rs +++ b/crates/hir_def/src/path/lower/lower_use.rs | |||
@@ -4,68 +4,15 @@ | |||
4 | use std::iter; | 4 | use std::iter; |
5 | 5 | ||
6 | use either::Either; | 6 | use either::Either; |
7 | use hir_expand::{hygiene::Hygiene, name::AsName}; | 7 | use hir_expand::hygiene::Hygiene; |
8 | use syntax::ast::{self, NameOwner}; | 8 | use syntax::{ast, AstNode}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | db::DefDatabase, | 11 | db::DefDatabase, |
12 | path::{ImportAlias, ModPath, PathKind}, | 12 | path::{ModPath, PathKind}, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | pub(crate) fn lower_use_tree( | 15 | pub(crate) fn convert_path( |
16 | db: &dyn DefDatabase, | ||
17 | prefix: Option<ModPath>, | ||
18 | tree: ast::UseTree, | ||
19 | hygiene: &Hygiene, | ||
20 | cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<ImportAlias>), | ||
21 | ) { | ||
22 | if let Some(use_tree_list) = tree.use_tree_list() { | ||
23 | let prefix = match tree.path() { | ||
24 | // E.g. use something::{{{inner}}}; | ||
25 | None => prefix, | ||
26 | // E.g. `use something::{inner}` (prefix is `None`, path is `something`) | ||
27 | // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) | ||
28 | Some(path) => match convert_path(db, prefix, path, hygiene) { | ||
29 | Some(it) => Some(it), | ||
30 | None => return, // FIXME: report errors somewhere | ||
31 | }, | ||
32 | }; | ||
33 | for child_tree in use_tree_list.use_trees() { | ||
34 | lower_use_tree(db, prefix.clone(), child_tree, hygiene, cb); | ||
35 | } | ||
36 | } else { | ||
37 | let alias = tree.rename().map(|a| { | ||
38 | a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) | ||
39 | }); | ||
40 | let is_glob = tree.star_token().is_some(); | ||
41 | if let Some(ast_path) = tree.path() { | ||
42 | // Handle self in a path. | ||
43 | // E.g. `use something::{self, <...>}` | ||
44 | if ast_path.qualifier().is_none() { | ||
45 | if let Some(segment) = ast_path.segment() { | ||
46 | if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { | ||
47 | if let Some(prefix) = prefix { | ||
48 | cb(prefix, &tree, false, alias); | ||
49 | return; | ||
50 | } | ||
51 | } | ||
52 | } | ||
53 | } | ||
54 | if let Some(path) = convert_path(db, prefix, ast_path, hygiene) { | ||
55 | cb(path, &tree, is_glob, alias) | ||
56 | } | ||
57 | // FIXME: report errors somewhere | ||
58 | // We get here if we do | ||
59 | } else if is_glob { | ||
60 | cov_mark::hit!(glob_enum_group); | ||
61 | if let Some(prefix) = prefix { | ||
62 | cb(prefix, &tree, is_glob, None) | ||
63 | } | ||
64 | } | ||
65 | } | ||
66 | } | ||
67 | |||
68 | fn convert_path( | ||
69 | db: &dyn DefDatabase, | 16 | db: &dyn DefDatabase, |
70 | prefix: Option<ModPath>, | 17 | prefix: Option<ModPath>, |
71 | path: ast::Path, | 18 | path: ast::Path, |
@@ -78,7 +25,7 @@ fn convert_path( | |||
78 | }; | 25 | }; |
79 | 26 | ||
80 | let segment = path.segment()?; | 27 | let segment = path.segment()?; |
81 | let res = match segment.kind()? { | 28 | let mut mod_path = match segment.kind()? { |
82 | ast::PathSegmentKind::Name(name_ref) => { | 29 | ast::PathSegmentKind::Name(name_ref) => { |
83 | match hygiene.name_ref_to_name(db.upcast(), name_ref) { | 30 | match hygiene.name_ref_to_name(db.upcast(), name_ref) { |
84 | Either::Left(name) => { | 31 | Either::Left(name) => { |
@@ -125,5 +72,18 @@ fn convert_path( | |||
125 | return None; | 72 | return None; |
126 | } | 73 | } |
127 | }; | 74 | }; |
128 | Some(res) | 75 | |
76 | // handle local_inner_macros : | ||
77 | // Basically, even in rustc it is quite hacky: | ||
78 | // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456 | ||
79 | // We follow what it did anyway :) | ||
80 | if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { | ||
81 | if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { | ||
82 | if let Some(crate_id) = hygiene.local_inner_macros(db.upcast(), path) { | ||
83 | mod_path.kind = PathKind::DollarCrate(crate_id); | ||
84 | } | ||
85 | } | ||
86 | } | ||
87 | |||
88 | Some(mod_path) | ||
129 | } | 89 | } |
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs index 0391cc49b..fb8a6f260 100644 --- a/crates/hir_def/src/resolver.rs +++ b/crates/hir_def/src/resolver.rs | |||
@@ -337,22 +337,34 @@ impl Resolver { | |||
337 | pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> { | 337 | pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> { |
338 | let mut traits = FxHashSet::default(); | 338 | let mut traits = FxHashSet::default(); |
339 | for scope in &self.scopes { | 339 | for scope in &self.scopes { |
340 | if let Scope::ModuleScope(m) = scope { | 340 | match scope { |
341 | if let Some(prelude) = m.def_map.prelude() { | 341 | Scope::ModuleScope(m) => { |
342 | let prelude_def_map = prelude.def_map(db); | 342 | if let Some(prelude) = m.def_map.prelude() { |
343 | traits.extend(prelude_def_map[prelude.local_id].scope.traits()); | ||
344 | } | ||
345 | traits.extend(m.def_map[m.module_id].scope.traits()); | ||
346 | |||
347 | // Add all traits that are in scope because of the containing DefMaps | ||
348 | m.def_map.with_ancestor_maps(db, m.module_id, &mut |def_map, module| { | ||
349 | if let Some(prelude) = def_map.prelude() { | ||
350 | let prelude_def_map = prelude.def_map(db); | 343 | let prelude_def_map = prelude.def_map(db); |
351 | traits.extend(prelude_def_map[prelude.local_id].scope.traits()); | 344 | traits.extend(prelude_def_map[prelude.local_id].scope.traits()); |
352 | } | 345 | } |
353 | traits.extend(def_map[module].scope.traits()); | 346 | traits.extend(m.def_map[m.module_id].scope.traits()); |
354 | None::<()> | 347 | |
355 | }); | 348 | // Add all traits that are in scope because of the containing DefMaps |
349 | m.def_map.with_ancestor_maps(db, m.module_id, &mut |def_map, module| { | ||
350 | if let Some(prelude) = def_map.prelude() { | ||
351 | let prelude_def_map = prelude.def_map(db); | ||
352 | traits.extend(prelude_def_map[prelude.local_id].scope.traits()); | ||
353 | } | ||
354 | traits.extend(def_map[module].scope.traits()); | ||
355 | None::<()> | ||
356 | }); | ||
357 | } | ||
358 | &Scope::ImplDefScope(impl_) => { | ||
359 | if let Some(target_trait) = &db.impl_data(impl_).target_trait { | ||
360 | if let Some(TypeNs::TraitId(trait_)) = | ||
361 | self.resolve_path_in_type_ns_fully(db, target_trait.path.mod_path()) | ||
362 | { | ||
363 | traits.insert(trait_); | ||
364 | } | ||
365 | } | ||
366 | } | ||
367 | _ => (), | ||
356 | } | 368 | } |
357 | } | 369 | } |
358 | traits | 370 | traits |
diff --git a/crates/hir_def/src/test_db.rs b/crates/hir_def/src/test_db.rs index 8fa703a57..a9c1e13e2 100644 --- a/crates/hir_def/src/test_db.rs +++ b/crates/hir_def/src/test_db.rs | |||
@@ -5,19 +5,20 @@ use std::{ | |||
5 | sync::{Arc, Mutex}, | 5 | sync::{Arc, Mutex}, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, Upcast}; | 8 | use base_db::{ |
9 | salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, FileRange, Upcast, | ||
10 | }; | ||
9 | use base_db::{AnchoredPath, SourceDatabase}; | 11 | use base_db::{AnchoredPath, SourceDatabase}; |
10 | use hir_expand::diagnostics::Diagnostic; | ||
11 | use hir_expand::diagnostics::DiagnosticSinkBuilder; | ||
12 | use hir_expand::{db::AstDatabase, InFile}; | 12 | use hir_expand::{db::AstDatabase, InFile}; |
13 | use rustc_hash::FxHashMap; | 13 | use rustc_hash::FxHashMap; |
14 | use rustc_hash::FxHashSet; | 14 | use rustc_hash::FxHashSet; |
15 | use syntax::{algo, ast, AstNode, TextRange, TextSize}; | 15 | use syntax::{algo, ast, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize}; |
16 | use test_utils::extract_annotations; | 16 | use test_utils::extract_annotations; |
17 | 17 | ||
18 | use crate::{ | 18 | use crate::{ |
19 | body::BodyDiagnostic, | ||
19 | db::DefDatabase, | 20 | db::DefDatabase, |
20 | nameres::{DefMap, ModuleSource}, | 21 | nameres::{diagnostics::DefDiagnosticKind, DefMap, ModuleSource}, |
21 | src::HasSource, | 22 | src::HasSource, |
22 | LocalModuleId, Lookup, ModuleDefId, ModuleId, | 23 | LocalModuleId, Lookup, ModuleDefId, ModuleId, |
23 | }; | 24 | }; |
@@ -262,19 +263,72 @@ impl TestDB { | |||
262 | .collect() | 263 | .collect() |
263 | } | 264 | } |
264 | 265 | ||
265 | pub(crate) fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) { | 266 | pub(crate) fn diagnostics(&self, cb: &mut dyn FnMut(FileRange, String)) { |
266 | let crate_graph = self.crate_graph(); | 267 | let crate_graph = self.crate_graph(); |
267 | for krate in crate_graph.iter() { | 268 | for krate in crate_graph.iter() { |
268 | let crate_def_map = self.crate_def_map(krate); | 269 | let crate_def_map = self.crate_def_map(krate); |
269 | 270 | ||
270 | let mut sink = DiagnosticSinkBuilder::new().build(&mut cb); | 271 | for diag in crate_def_map.diagnostics() { |
271 | for (module_id, module) in crate_def_map.modules() { | 272 | let (node, message): (InFile<SyntaxNode>, &str) = match &diag.kind { |
272 | crate_def_map.add_diagnostics(self, module_id, &mut sink); | 273 | DefDiagnosticKind::UnresolvedModule { ast, .. } => { |
274 | let node = ast.to_node(self.upcast()); | ||
275 | (InFile::new(ast.file_id, node.syntax().clone()), "UnresolvedModule") | ||
276 | } | ||
277 | DefDiagnosticKind::UnresolvedExternCrate { ast, .. } => { | ||
278 | let node = ast.to_node(self.upcast()); | ||
279 | (InFile::new(ast.file_id, node.syntax().clone()), "UnresolvedExternCrate") | ||
280 | } | ||
281 | DefDiagnosticKind::UnresolvedImport { id, .. } => { | ||
282 | let item_tree = id.item_tree(self.upcast()); | ||
283 | let import = &item_tree[id.value]; | ||
284 | let node = InFile::new(id.file_id(), import.ast_id).to_node(self.upcast()); | ||
285 | (InFile::new(id.file_id(), node.syntax().clone()), "UnresolvedImport") | ||
286 | } | ||
287 | DefDiagnosticKind::UnconfiguredCode { ast, .. } => { | ||
288 | let node = ast.to_node(self.upcast()); | ||
289 | (InFile::new(ast.file_id, node.syntax().clone()), "UnconfiguredCode") | ||
290 | } | ||
291 | DefDiagnosticKind::UnresolvedProcMacro { ast, .. } => { | ||
292 | (ast.to_node(self.upcast()), "UnresolvedProcMacro") | ||
293 | } | ||
294 | DefDiagnosticKind::UnresolvedMacroCall { ast, .. } => { | ||
295 | let node = ast.to_node(self.upcast()); | ||
296 | (InFile::new(ast.file_id, node.syntax().clone()), "UnresolvedMacroCall") | ||
297 | } | ||
298 | DefDiagnosticKind::MacroError { ast, message } => { | ||
299 | (ast.to_node(self.upcast()), message.as_str()) | ||
300 | } | ||
301 | }; | ||
302 | |||
303 | let frange = node.as_ref().original_file_range(self); | ||
304 | cb(frange, message.to_string()) | ||
305 | } | ||
273 | 306 | ||
307 | for (_module_id, module) in crate_def_map.modules() { | ||
274 | for decl in module.scope.declarations() { | 308 | for decl in module.scope.declarations() { |
275 | if let ModuleDefId::FunctionId(it) = decl { | 309 | if let ModuleDefId::FunctionId(it) = decl { |
276 | let source_map = self.body_with_source_map(it.into()).1; | 310 | let source_map = self.body_with_source_map(it.into()).1; |
277 | source_map.add_diagnostics(self, &mut sink); | 311 | for diag in source_map.diagnostics() { |
312 | let (ptr, message): (InFile<SyntaxNodePtr>, &str) = match diag { | ||
313 | BodyDiagnostic::InactiveCode { node, .. } => { | ||
314 | (node.clone().map(|it| it.into()), "InactiveCode") | ||
315 | } | ||
316 | BodyDiagnostic::MacroError { node, message } => { | ||
317 | (node.clone().map(|it| it.into()), message.as_str()) | ||
318 | } | ||
319 | BodyDiagnostic::UnresolvedProcMacro { node } => { | ||
320 | (node.clone().map(|it| it.into()), "UnresolvedProcMacro") | ||
321 | } | ||
322 | BodyDiagnostic::UnresolvedMacroCall { node, .. } => { | ||
323 | (node.clone().map(|it| it.into()), "UnresolvedMacroCall") | ||
324 | } | ||
325 | }; | ||
326 | |||
327 | let root = self.parse_or_expand(ptr.file_id).unwrap(); | ||
328 | let node = ptr.map(|ptr| ptr.to_node(&root)); | ||
329 | let frange = node.as_ref().original_file_range(self); | ||
330 | cb(frange, message.to_string()) | ||
331 | } | ||
278 | } | 332 | } |
279 | } | 333 | } |
280 | } | 334 | } |
@@ -287,14 +341,7 @@ impl TestDB { | |||
287 | assert!(!annotations.is_empty()); | 341 | assert!(!annotations.is_empty()); |
288 | 342 | ||
289 | let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default(); | 343 | let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default(); |
290 | db.diagnostics(|d| { | 344 | db.diagnostics(&mut |frange, message| { |
291 | let src = d.display_source(); | ||
292 | let root = db.parse_or_expand(src.file_id).unwrap(); | ||
293 | |||
294 | let node = src.map(|ptr| ptr.to_node(&root)); | ||
295 | let frange = node.as_ref().original_file_range(db); | ||
296 | |||
297 | let message = d.message(); | ||
298 | actual.entry(frange.file_id).or_default().push((frange.range, message)); | 345 | actual.entry(frange.file_id).or_default().push((frange.range, message)); |
299 | }); | 346 | }); |
300 | 347 | ||
@@ -319,7 +366,7 @@ impl TestDB { | |||
319 | assert!(annotations.is_empty()); | 366 | assert!(annotations.is_empty()); |
320 | 367 | ||
321 | let mut has_diagnostics = false; | 368 | let mut has_diagnostics = false; |
322 | db.diagnostics(|_| { | 369 | db.diagnostics(&mut |_, _| { |
323 | has_diagnostics = true; | 370 | has_diagnostics = true; |
324 | }); | 371 | }); |
325 | 372 | ||
diff --git a/crates/hir_def/src/type_ref.rs b/crates/hir_def/src/type_ref.rs index cdcab7110..cbde6b940 100644 --- a/crates/hir_def/src/type_ref.rs +++ b/crates/hir_def/src/type_ref.rs | |||
@@ -5,7 +5,7 @@ use hir_expand::{name::Name, AstId, InFile}; | |||
5 | use std::convert::TryInto; | 5 | use std::convert::TryInto; |
6 | use syntax::ast; | 6 | use syntax::ast; |
7 | 7 | ||
8 | use crate::{body::LowerCtx, path::Path}; | 8 | use crate::{body::LowerCtx, intern::Interned, path::Path}; |
9 | 9 | ||
10 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] | 10 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] |
11 | pub enum Mutability { | 11 | pub enum Mutability { |
@@ -91,8 +91,8 @@ pub enum TypeRef { | |||
91 | /// A fn pointer. Last element of the vector is the return type. | 91 | /// A fn pointer. Last element of the vector is the return type. |
92 | Fn(Vec<TypeRef>, bool /*varargs*/), | 92 | Fn(Vec<TypeRef>, bool /*varargs*/), |
93 | // For | 93 | // For |
94 | ImplTrait(Vec<TypeBound>), | 94 | ImplTrait(Vec<Interned<TypeBound>>), |
95 | DynTrait(Vec<TypeBound>), | 95 | DynTrait(Vec<Interned<TypeBound>>), |
96 | Macro(AstId<ast::MacroCall>), | 96 | Macro(AstId<ast::MacroCall>), |
97 | Error, | 97 | Error, |
98 | } | 98 | } |
@@ -232,7 +232,7 @@ impl TypeRef { | |||
232 | | TypeRef::Slice(type_ref) => go(&type_ref, f), | 232 | | TypeRef::Slice(type_ref) => go(&type_ref, f), |
233 | TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { | 233 | TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { |
234 | for bound in bounds { | 234 | for bound in bounds { |
235 | match bound { | 235 | match bound.as_ref() { |
236 | TypeBound::Path(path) => go_path(path, f), | 236 | TypeBound::Path(path) => go_path(path, f), |
237 | TypeBound::Lifetime(_) | TypeBound::Error => (), | 237 | TypeBound::Lifetime(_) | TypeBound::Error => (), |
238 | } | 238 | } |
@@ -262,7 +262,7 @@ impl TypeRef { | |||
262 | go(type_ref, f); | 262 | go(type_ref, f); |
263 | } | 263 | } |
264 | for bound in &binding.bounds { | 264 | for bound in &binding.bounds { |
265 | match bound { | 265 | match bound.as_ref() { |
266 | TypeBound::Path(path) => go_path(path, f), | 266 | TypeBound::Path(path) => go_path(path, f), |
267 | TypeBound::Lifetime(_) | TypeBound::Error => (), | 267 | TypeBound::Lifetime(_) | TypeBound::Error => (), |
268 | } | 268 | } |
@@ -277,9 +277,9 @@ impl TypeRef { | |||
277 | pub(crate) fn type_bounds_from_ast( | 277 | pub(crate) fn type_bounds_from_ast( |
278 | lower_ctx: &LowerCtx, | 278 | lower_ctx: &LowerCtx, |
279 | type_bounds_opt: Option<ast::TypeBoundList>, | 279 | type_bounds_opt: Option<ast::TypeBoundList>, |
280 | ) -> Vec<TypeBound> { | 280 | ) -> Vec<Interned<TypeBound>> { |
281 | if let Some(type_bounds) = type_bounds_opt { | 281 | if let Some(type_bounds) = type_bounds_opt { |
282 | type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() | 282 | type_bounds.bounds().map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))).collect() |
283 | } else { | 283 | } else { |
284 | vec![] | 284 | vec![] |
285 | } | 285 | } |
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs index 94d7aecb6..0b310ba2f 100644 --- a/crates/hir_expand/src/builtin_macro.rs +++ b/crates/hir_expand/src/builtin_macro.rs | |||
@@ -8,7 +8,6 @@ use base_db::{AnchoredPath, Edition, FileId}; | |||
8 | use cfg::CfgExpr; | 8 | use cfg::CfgExpr; |
9 | use either::Either; | 9 | use either::Either; |
10 | use mbe::{parse_exprs_with_sep, parse_to_token_tree, ExpandResult}; | 10 | use mbe::{parse_exprs_with_sep, parse_to_token_tree, ExpandResult}; |
11 | use parser::FragmentKind; | ||
12 | use syntax::ast::{self, AstToken}; | 11 | use syntax::ast::{self, AstToken}; |
13 | 12 | ||
14 | macro_rules! register_builtin { | 13 | macro_rules! register_builtin { |
@@ -47,7 +46,7 @@ macro_rules! register_builtin { | |||
47 | let expander = match *self { | 46 | let expander = match *self { |
48 | $( EagerExpander::$e_kind => $e_expand, )* | 47 | $( EagerExpander::$e_kind => $e_expand, )* |
49 | }; | 48 | }; |
50 | expander(db,arg_id,tt) | 49 | expander(db, arg_id, tt) |
51 | } | 50 | } |
52 | } | 51 | } |
53 | 52 | ||
@@ -64,14 +63,13 @@ macro_rules! register_builtin { | |||
64 | #[derive(Debug)] | 63 | #[derive(Debug)] |
65 | pub struct ExpandedEager { | 64 | pub struct ExpandedEager { |
66 | pub(crate) subtree: tt::Subtree, | 65 | pub(crate) subtree: tt::Subtree, |
67 | pub(crate) fragment: FragmentKind, | ||
68 | /// The included file ID of the include macro. | 66 | /// The included file ID of the include macro. |
69 | pub(crate) included_file: Option<FileId>, | 67 | pub(crate) included_file: Option<FileId>, |
70 | } | 68 | } |
71 | 69 | ||
72 | impl ExpandedEager { | 70 | impl ExpandedEager { |
73 | fn new(subtree: tt::Subtree, fragment: FragmentKind) -> Self { | 71 | fn new(subtree: tt::Subtree) -> Self { |
74 | ExpandedEager { subtree, fragment, included_file: None } | 72 | ExpandedEager { subtree, included_file: None } |
75 | } | 73 | } |
76 | } | 74 | } |
77 | 75 | ||
@@ -340,7 +338,7 @@ fn compile_error_expand( | |||
340 | _ => mbe::ExpandError::BindingError("`compile_error!` argument must be a string".into()), | 338 | _ => mbe::ExpandError::BindingError("`compile_error!` argument must be a string".into()), |
341 | }; | 339 | }; |
342 | 340 | ||
343 | ExpandResult { value: Some(ExpandedEager::new(quote! {}, FragmentKind::Items)), err: Some(err) } | 341 | ExpandResult { value: Some(ExpandedEager::new(quote! {})), err: Some(err) } |
344 | } | 342 | } |
345 | 343 | ||
346 | fn concat_expand( | 344 | fn concat_expand( |
@@ -371,7 +369,7 @@ fn concat_expand( | |||
371 | } | 369 | } |
372 | } | 370 | } |
373 | } | 371 | } |
374 | ExpandResult { value: Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr)), err } | 372 | ExpandResult { value: Some(ExpandedEager::new(quote!(#text))), err } |
375 | } | 373 | } |
376 | 374 | ||
377 | fn concat_idents_expand( | 375 | fn concat_idents_expand( |
@@ -393,7 +391,7 @@ fn concat_idents_expand( | |||
393 | } | 391 | } |
394 | } | 392 | } |
395 | let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() }; | 393 | let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() }; |
396 | ExpandResult { value: Some(ExpandedEager::new(quote!(#ident), FragmentKind::Expr)), err } | 394 | ExpandResult { value: Some(ExpandedEager::new(quote!(#ident))), err } |
397 | } | 395 | } |
398 | 396 | ||
399 | fn relative_file( | 397 | fn relative_file( |
@@ -442,14 +440,7 @@ fn include_expand( | |||
442 | 440 | ||
443 | match res { | 441 | match res { |
444 | Ok((subtree, file_id)) => { | 442 | Ok((subtree, file_id)) => { |
445 | // FIXME: | 443 | ExpandResult::ok(Some(ExpandedEager { subtree, included_file: Some(file_id) })) |
446 | // Handle include as expression | ||
447 | |||
448 | ExpandResult::ok(Some(ExpandedEager { | ||
449 | subtree, | ||
450 | fragment: FragmentKind::Items, | ||
451 | included_file: Some(file_id), | ||
452 | })) | ||
453 | } | 444 | } |
454 | Err(e) => ExpandResult::only_err(e), | 445 | Err(e) => ExpandResult::only_err(e), |
455 | } | 446 | } |
@@ -472,7 +463,7 @@ fn include_bytes_expand( | |||
472 | id: tt::TokenId::unspecified(), | 463 | id: tt::TokenId::unspecified(), |
473 | }))], | 464 | }))], |
474 | }; | 465 | }; |
475 | ExpandResult::ok(Some(ExpandedEager::new(res, FragmentKind::Expr))) | 466 | ExpandResult::ok(Some(ExpandedEager::new(res))) |
476 | } | 467 | } |
477 | 468 | ||
478 | fn include_str_expand( | 469 | fn include_str_expand( |
@@ -492,14 +483,14 @@ fn include_str_expand( | |||
492 | let file_id = match relative_file(db, arg_id.into(), &path, true) { | 483 | let file_id = match relative_file(db, arg_id.into(), &path, true) { |
493 | Ok(file_id) => file_id, | 484 | Ok(file_id) => file_id, |
494 | Err(_) => { | 485 | Err(_) => { |
495 | return ExpandResult::ok(Some(ExpandedEager::new(quote!(""), FragmentKind::Expr))); | 486 | return ExpandResult::ok(Some(ExpandedEager::new(quote!("")))); |
496 | } | 487 | } |
497 | }; | 488 | }; |
498 | 489 | ||
499 | let text = db.file_text(file_id); | 490 | let text = db.file_text(file_id); |
500 | let text = &*text; | 491 | let text = &*text; |
501 | 492 | ||
502 | ExpandResult::ok(Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr))) | 493 | ExpandResult::ok(Some(ExpandedEager::new(quote!(#text)))) |
503 | } | 494 | } |
504 | 495 | ||
505 | fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option<String> { | 496 | fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option<String> { |
@@ -535,7 +526,7 @@ fn env_expand( | |||
535 | }); | 526 | }); |
536 | let expanded = quote! { #s }; | 527 | let expanded = quote! { #s }; |
537 | 528 | ||
538 | ExpandResult { value: Some(ExpandedEager::new(expanded, FragmentKind::Expr)), err } | 529 | ExpandResult { value: Some(ExpandedEager::new(expanded)), err } |
539 | } | 530 | } |
540 | 531 | ||
541 | fn option_env_expand( | 532 | fn option_env_expand( |
@@ -553,7 +544,7 @@ fn option_env_expand( | |||
553 | Some(s) => quote! { std::option::Some(#s) }, | 544 | Some(s) => quote! { std::option::Some(#s) }, |
554 | }; | 545 | }; |
555 | 546 | ||
556 | ExpandResult::ok(Some(ExpandedEager::new(expanded, FragmentKind::Expr))) | 547 | ExpandResult::ok(Some(ExpandedEager::new(expanded))) |
557 | } | 548 | } |
558 | 549 | ||
559 | #[cfg(test)] | 550 | #[cfg(test)] |
@@ -565,6 +556,7 @@ mod tests { | |||
565 | }; | 556 | }; |
566 | use base_db::{fixture::WithFixture, SourceDatabase}; | 557 | use base_db::{fixture::WithFixture, SourceDatabase}; |
567 | use expect_test::{expect, Expect}; | 558 | use expect_test::{expect, Expect}; |
559 | use parser::FragmentKind; | ||
568 | use std::sync::Arc; | 560 | use std::sync::Arc; |
569 | use syntax::ast::NameOwner; | 561 | use syntax::ast::NameOwner; |
570 | 562 | ||
@@ -617,6 +609,7 @@ mod tests { | |||
617 | local_inner: false, | 609 | local_inner: false, |
618 | }; | 610 | }; |
619 | 611 | ||
612 | let fragment = crate::to_fragment_kind(¯o_call); | ||
620 | let args = macro_call.token_tree().unwrap(); | 613 | let args = macro_call.token_tree().unwrap(); |
621 | let parsed_args = mbe::ast_to_token_tree(&args).0; | 614 | let parsed_args = mbe::ast_to_token_tree(&args).0; |
622 | let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(¯o_call)); | 615 | let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(¯o_call)); |
@@ -639,7 +632,7 @@ mod tests { | |||
639 | arg_or_expansion: Arc::new(expanded.subtree), | 632 | arg_or_expansion: Arc::new(expanded.subtree), |
640 | included_file: expanded.included_file, | 633 | included_file: expanded.included_file, |
641 | }), | 634 | }), |
642 | kind: MacroCallKind::FnLike { ast_id: call_id, fragment: expanded.fragment }, | 635 | kind: MacroCallKind::FnLike { ast_id: call_id, fragment }, |
643 | }; | 636 | }; |
644 | 637 | ||
645 | let id: MacroCallId = db.intern_macro(loc).into(); | 638 | let id: MacroCallId = db.intern_macro(loc).into(); |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 5c769c1bf..e8f4af309 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -131,15 +131,15 @@ pub trait AstDatabase: SourceDatabase { | |||
131 | /// used for completion, where we want to see what 'would happen' if we insert a | 131 | /// used for completion, where we want to see what 'would happen' if we insert a |
132 | /// token. The `token_to_map` mapped down into the expansion, with the mapped | 132 | /// token. The `token_to_map` mapped down into the expansion, with the mapped |
133 | /// token returned. | 133 | /// token returned. |
134 | pub fn expand_hypothetical( | 134 | pub fn expand_speculative( |
135 | db: &dyn AstDatabase, | 135 | db: &dyn AstDatabase, |
136 | actual_macro_call: MacroCallId, | 136 | actual_macro_call: MacroCallId, |
137 | hypothetical_args: &ast::TokenTree, | 137 | speculative_args: &ast::TokenTree, |
138 | token_to_map: SyntaxToken, | 138 | token_to_map: SyntaxToken, |
139 | ) -> Option<(SyntaxNode, SyntaxToken)> { | 139 | ) -> Option<(SyntaxNode, SyntaxToken)> { |
140 | let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()); | 140 | let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax()); |
141 | let range = | 141 | let range = |
142 | token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; | 142 | token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?; |
143 | let token_id = tmap_1.token_by_range(range)?; | 143 | let token_id = tmap_1.token_by_range(range)?; |
144 | 144 | ||
145 | let macro_def = { | 145 | let macro_def = { |
@@ -147,15 +147,15 @@ pub fn expand_hypothetical( | |||
147 | db.macro_def(loc.def)? | 147 | db.macro_def(loc.def)? |
148 | }; | 148 | }; |
149 | 149 | ||
150 | let hypothetical_expansion = macro_def.expand(db, actual_macro_call, &tt); | 150 | let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt); |
151 | 151 | ||
152 | let fragment_kind = macro_fragment_kind(db, actual_macro_call); | 152 | let fragment_kind = macro_fragment_kind(db, actual_macro_call); |
153 | 153 | ||
154 | let (node, tmap_2) = | 154 | let (node, tmap_2) = |
155 | mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?; | 155 | mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?; |
156 | 156 | ||
157 | let token_id = macro_def.map_id_down(token_id); | 157 | let token_id = macro_def.map_id_down(token_id); |
158 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; | 158 | let range = tmap_2.range_by_token(token_id, token_to_map.kind())?; |
159 | let token = node.syntax_node().covering_element(range).into_token()?; | 159 | let token = node.syntax_node().covering_element(range).into_token()?; |
160 | Some((node.syntax_node(), token)) | 160 | Some((node.syntax_node(), token)) |
161 | } | 161 | } |
@@ -186,7 +186,7 @@ fn parse_macro_expansion( | |||
186 | // The final goal we would like to make all parse_macro success, | 186 | // The final goal we would like to make all parse_macro success, |
187 | // such that the following log will not call anyway. | 187 | // such that the following log will not call anyway. |
188 | let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); | 188 | let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); |
189 | let node = loc.kind.node(db); | 189 | let node = loc.kind.to_node(db); |
190 | 190 | ||
191 | // collect parent information for warning log | 191 | // collect parent information for warning log |
192 | let parents = | 192 | let parents = |
@@ -325,7 +325,7 @@ fn macro_expand_with_arg( | |||
325 | if let Some(eager) = &loc.eager { | 325 | if let Some(eager) = &loc.eager { |
326 | if arg.is_some() { | 326 | if arg.is_some() { |
327 | return ExpandResult::str_err( | 327 | return ExpandResult::str_err( |
328 | "hypothetical macro expansion not implemented for eager macro".to_owned(), | 328 | "speculative macro expansion not implemented for eager macro".to_owned(), |
329 | ); | 329 | ); |
330 | } else { | 330 | } else { |
331 | return ExpandResult { | 331 | return ExpandResult { |
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs index e165b9c5f..14af628a1 100644 --- a/crates/hir_expand/src/eager.rs +++ b/crates/hir_expand/src/eager.rs | |||
@@ -113,6 +113,7 @@ pub fn expand_eager_macro( | |||
113 | 113 | ||
114 | let ast_map = db.ast_id_map(macro_call.file_id); | 114 | let ast_map = db.ast_id_map(macro_call.file_id); |
115 | let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(¯o_call.value)); | 115 | let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(¯o_call.value)); |
116 | let fragment = crate::to_fragment_kind(¯o_call.value); | ||
116 | 117 | ||
117 | // Note: | 118 | // Note: |
118 | // When `lazy_expand` is called, its *parent* file must be already exists. | 119 | // When `lazy_expand` is called, its *parent* file must be already exists. |
@@ -152,7 +153,7 @@ pub fn expand_eager_macro( | |||
152 | arg_or_expansion: Arc::new(expanded.subtree), | 153 | arg_or_expansion: Arc::new(expanded.subtree), |
153 | included_file: expanded.included_file, | 154 | included_file: expanded.included_file, |
154 | }), | 155 | }), |
155 | kind: MacroCallKind::FnLike { ast_id: call_id, fragment: expanded.fragment }, | 156 | kind: MacroCallKind::FnLike { ast_id: call_id, fragment }, |
156 | }; | 157 | }; |
157 | 158 | ||
158 | Ok(db.intern_macro(loc)) | 159 | Ok(db.intern_macro(loc)) |
@@ -197,7 +198,7 @@ fn eager_macro_recur( | |||
197 | macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>, | 198 | macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>, |
198 | mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError), | 199 | mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError), |
199 | ) -> Result<SyntaxNode, ErrorEmitted> { | 200 | ) -> Result<SyntaxNode, ErrorEmitted> { |
200 | let original = curr.value.clone().clone_for_update(); | 201 | let original = curr.value.clone_for_update(); |
201 | 202 | ||
202 | let children = original.descendants().filter_map(ast::MacroCall::cast); | 203 | let children = original.descendants().filter_map(ast::MacroCall::cast); |
203 | let mut replacements = Vec::new(); | 204 | let mut replacements = Vec::new(); |
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index 38e09fdd4..d98913907 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs | |||
@@ -154,7 +154,7 @@ impl HygieneInfo { | |||
154 | }, | 154 | }, |
155 | }; | 155 | }; |
156 | 156 | ||
157 | let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; | 157 | let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?; |
158 | Some((tt.with_value(range + tt.value), origin)) | 158 | Some((tt.with_value(range + tt.value), origin)) |
159 | } | 159 | } |
160 | } | 160 | } |
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 92c679dd2..90d8ae240 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -8,7 +8,6 @@ pub mod db; | |||
8 | pub mod ast_id_map; | 8 | pub mod ast_id_map; |
9 | pub mod name; | 9 | pub mod name; |
10 | pub mod hygiene; | 10 | pub mod hygiene; |
11 | pub mod diagnostics; | ||
12 | pub mod builtin_derive; | 11 | pub mod builtin_derive; |
13 | pub mod builtin_macro; | 12 | pub mod builtin_macro; |
14 | pub mod proc_macro; | 13 | pub mod proc_macro; |
@@ -16,6 +15,7 @@ pub mod quote; | |||
16 | pub mod eager; | 15 | pub mod eager; |
17 | mod input; | 16 | mod input; |
18 | 17 | ||
18 | use base_db::ProcMacroKind; | ||
19 | use either::Either; | 19 | use either::Either; |
20 | 20 | ||
21 | pub use mbe::{ExpandError, ExpandResult}; | 21 | pub use mbe::{ExpandError, ExpandResult}; |
@@ -108,7 +108,7 @@ impl HirFileId { | |||
108 | HirFileIdRepr::FileId(_) => None, | 108 | HirFileIdRepr::FileId(_) => None, |
109 | HirFileIdRepr::MacroFile(macro_file) => { | 109 | HirFileIdRepr::MacroFile(macro_file) => { |
110 | let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); | 110 | let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); |
111 | Some(loc.kind.node(db)) | 111 | Some(loc.kind.to_node(db)) |
112 | } | 112 | } |
113 | } | 113 | } |
114 | } | 114 | } |
@@ -153,7 +153,7 @@ impl HirFileId { | |||
153 | HirFileIdRepr::MacroFile(macro_file) => { | 153 | HirFileIdRepr::MacroFile(macro_file) => { |
154 | let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); | 154 | let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id); |
155 | let item = match loc.def.kind { | 155 | let item = match loc.def.kind { |
156 | MacroDefKind::BuiltInDerive(..) => loc.kind.node(db), | 156 | MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), |
157 | _ => return None, | 157 | _ => return None, |
158 | }; | 158 | }; |
159 | Some(item.with_value(ast::Item::cast(item.value.clone())?)) | 159 | Some(item.with_value(ast::Item::cast(item.value.clone())?)) |
@@ -208,7 +208,7 @@ impl MacroDefId { | |||
208 | MacroDefKind::BuiltIn(_, id) => id, | 208 | MacroDefKind::BuiltIn(_, id) => id, |
209 | MacroDefKind::BuiltInDerive(_, id) => id, | 209 | MacroDefKind::BuiltInDerive(_, id) => id, |
210 | MacroDefKind::BuiltInEager(_, id) => id, | 210 | MacroDefKind::BuiltInEager(_, id) => id, |
211 | MacroDefKind::ProcMacro(_, id) => return Either::Right(*id), | 211 | MacroDefKind::ProcMacro(.., id) => return Either::Right(*id), |
212 | }; | 212 | }; |
213 | Either::Left(*id) | 213 | Either::Left(*id) |
214 | } | 214 | } |
@@ -225,7 +225,7 @@ pub enum MacroDefKind { | |||
225 | // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander | 225 | // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander |
226 | BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>), | 226 | BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>), |
227 | BuiltInEager(EagerExpander, AstId<ast::Macro>), | 227 | BuiltInEager(EagerExpander, AstId<ast::Macro>), |
228 | ProcMacro(ProcMacroExpander, AstId<ast::Fn>), | 228 | ProcMacro(ProcMacroExpander, ProcMacroKind, AstId<ast::Fn>), |
229 | } | 229 | } |
230 | 230 | ||
231 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 231 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
@@ -269,7 +269,7 @@ impl MacroCallKind { | |||
269 | } | 269 | } |
270 | } | 270 | } |
271 | 271 | ||
272 | fn node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> { | 272 | pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> { |
273 | match self { | 273 | match self { |
274 | MacroCallKind::FnLike { ast_id, .. } => { | 274 | MacroCallKind::FnLike { ast_id, .. } => { |
275 | ast_id.with_value(ast_id.to_node(db).syntax().clone()) | 275 | ast_id.with_value(ast_id.to_node(db).syntax().clone()) |
@@ -329,7 +329,7 @@ impl ExpansionInfo { | |||
329 | let token_id = self.macro_arg.1.token_by_range(range)?; | 329 | let token_id = self.macro_arg.1.token_by_range(range)?; |
330 | let token_id = self.macro_def.map_id_down(token_id); | 330 | let token_id = self.macro_def.map_id_down(token_id); |
331 | 331 | ||
332 | let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 332 | let range = self.exp_map.range_by_token(token_id, token.value.kind())?; |
333 | 333 | ||
334 | let token = self.expanded.value.covering_element(range).into_token()?; | 334 | let token = self.expanded.value.covering_element(range).into_token()?; |
335 | 335 | ||
@@ -354,7 +354,7 @@ impl ExpansionInfo { | |||
354 | }, | 354 | }, |
355 | }; | 355 | }; |
356 | 356 | ||
357 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 357 | let range = token_map.range_by_token(token_id, token.value.kind())?; |
358 | let token = | 358 | let token = |
359 | tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; | 359 | tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; |
360 | Some((tt.with_value(token), origin)) | 360 | Some((tt.with_value(token), origin)) |
diff --git a/crates/hir_ty/src/diagnostics.rs b/crates/hir_ty/src/diagnostics.rs index 84fc8ce14..7598e2193 100644 --- a/crates/hir_ty/src/diagnostics.rs +++ b/crates/hir_ty/src/diagnostics.rs | |||
@@ -8,12 +8,14 @@ use std::{any::Any, fmt}; | |||
8 | 8 | ||
9 | use base_db::CrateId; | 9 | use base_db::CrateId; |
10 | use hir_def::{DefWithBodyId, ModuleDefId}; | 10 | use hir_def::{DefWithBodyId, ModuleDefId}; |
11 | use hir_expand::diagnostics::{Diagnostic, DiagnosticCode, DiagnosticSink}; | ||
12 | use hir_expand::{name::Name, HirFileId, InFile}; | 11 | use hir_expand::{name::Name, HirFileId, InFile}; |
13 | use stdx::format_to; | 12 | use stdx::format_to; |
14 | use syntax::{ast, AstPtr, SyntaxNodePtr}; | 13 | use syntax::{ast, AstPtr, SyntaxNodePtr}; |
15 | 14 | ||
16 | use crate::db::HirDatabase; | 15 | use crate::{ |
16 | db::HirDatabase, | ||
17 | diagnostics_sink::{Diagnostic, DiagnosticCode, DiagnosticSink}, | ||
18 | }; | ||
17 | 19 | ||
18 | pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields}; | 20 | pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields}; |
19 | 21 | ||
@@ -446,15 +448,13 @@ impl Diagnostic for ReplaceFilterMapNextWithFindMap { | |||
446 | mod tests { | 448 | mod tests { |
447 | use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt}; | 449 | use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt}; |
448 | use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId}; | 450 | use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId}; |
449 | use hir_expand::{ | 451 | use hir_expand::db::AstDatabase; |
450 | db::AstDatabase, | ||
451 | diagnostics::{Diagnostic, DiagnosticSinkBuilder}, | ||
452 | }; | ||
453 | use rustc_hash::FxHashMap; | 452 | use rustc_hash::FxHashMap; |
454 | use syntax::{TextRange, TextSize}; | 453 | use syntax::{TextRange, TextSize}; |
455 | 454 | ||
456 | use crate::{ | 455 | use crate::{ |
457 | diagnostics::{validate_body, validate_module_item}, | 456 | diagnostics::{validate_body, validate_module_item}, |
457 | diagnostics_sink::{Diagnostic, DiagnosticSinkBuilder}, | ||
458 | test_db::TestDB, | 458 | test_db::TestDB, |
459 | }; | 459 | }; |
460 | 460 | ||
diff --git a/crates/hir_ty/src/diagnostics/decl_check.rs b/crates/hir_ty/src/diagnostics/decl_check.rs index 075dc4131..ef982cbcd 100644 --- a/crates/hir_ty/src/diagnostics/decl_check.rs +++ b/crates/hir_ty/src/diagnostics/decl_check.rs | |||
@@ -19,10 +19,7 @@ use hir_def::{ | |||
19 | src::HasSource, | 19 | src::HasSource, |
20 | AdtId, AttrDefId, ConstId, EnumId, FunctionId, Lookup, ModuleDefId, StaticId, StructId, | 20 | AdtId, AttrDefId, ConstId, EnumId, FunctionId, Lookup, ModuleDefId, StaticId, StructId, |
21 | }; | 21 | }; |
22 | use hir_expand::{ | 22 | use hir_expand::name::{AsName, Name}; |
23 | diagnostics::DiagnosticSink, | ||
24 | name::{AsName, Name}, | ||
25 | }; | ||
26 | use stdx::{always, never}; | 23 | use stdx::{always, never}; |
27 | use syntax::{ | 24 | use syntax::{ |
28 | ast::{self, NameOwner}, | 25 | ast::{self, NameOwner}, |
@@ -32,6 +29,7 @@ use syntax::{ | |||
32 | use crate::{ | 29 | use crate::{ |
33 | db::HirDatabase, | 30 | db::HirDatabase, |
34 | diagnostics::{decl_check::case_conv::*, CaseType, IdentType, IncorrectCase}, | 31 | diagnostics::{decl_check::case_conv::*, CaseType, IdentType, IncorrectCase}, |
32 | diagnostics_sink::DiagnosticSink, | ||
35 | }; | 33 | }; |
36 | 34 | ||
37 | mod allow { | 35 | mod allow { |
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs index d1f113e7f..86f82e3fa 100644 --- a/crates/hir_ty/src/diagnostics/expr.rs +++ b/crates/hir_ty/src/diagnostics/expr.rs | |||
@@ -5,7 +5,7 @@ | |||
5 | use std::sync::Arc; | 5 | use std::sync::Arc; |
6 | 6 | ||
7 | use hir_def::{expr::Statement, path::path, resolver::HasResolver, AssocItemId, DefWithBodyId}; | 7 | use hir_def::{expr::Statement, path::path, resolver::HasResolver, AssocItemId, DefWithBodyId}; |
8 | use hir_expand::{diagnostics::DiagnosticSink, name}; | 8 | use hir_expand::name; |
9 | use rustc_hash::FxHashSet; | 9 | use rustc_hash::FxHashSet; |
10 | use syntax::{ast, AstPtr}; | 10 | use syntax::{ast, AstPtr}; |
11 | 11 | ||
@@ -16,6 +16,7 @@ use crate::{ | |||
16 | MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkOrSomeInTailExpr, | 16 | MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkOrSomeInTailExpr, |
17 | MissingPatFields, RemoveThisSemicolon, | 17 | MissingPatFields, RemoveThisSemicolon, |
18 | }, | 18 | }, |
19 | diagnostics_sink::DiagnosticSink, | ||
19 | AdtId, InferenceResult, Interner, TyExt, TyKind, | 20 | AdtId, InferenceResult, Interner, TyExt, TyKind, |
20 | }; | 21 | }; |
21 | 22 | ||
diff --git a/crates/hir_ty/src/diagnostics/unsafe_check.rs b/crates/hir_ty/src/diagnostics/unsafe_check.rs index 5d13bddea..c3c483425 100644 --- a/crates/hir_ty/src/diagnostics/unsafe_check.rs +++ b/crates/hir_ty/src/diagnostics/unsafe_check.rs | |||
@@ -9,10 +9,10 @@ use hir_def::{ | |||
9 | resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, | 9 | resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, |
10 | DefWithBodyId, | 10 | DefWithBodyId, |
11 | }; | 11 | }; |
12 | use hir_expand::diagnostics::DiagnosticSink; | ||
13 | 12 | ||
14 | use crate::{ | 13 | use crate::{ |
15 | db::HirDatabase, diagnostics::MissingUnsafe, InferenceResult, Interner, TyExt, TyKind, | 14 | db::HirDatabase, diagnostics::MissingUnsafe, diagnostics_sink::DiagnosticSink, InferenceResult, |
15 | Interner, TyExt, TyKind, | ||
16 | }; | 16 | }; |
17 | 17 | ||
18 | pub(super) struct UnsafeValidator<'a, 'b: 'a> { | 18 | pub(super) struct UnsafeValidator<'a, 'b: 'a> { |
diff --git a/crates/hir_expand/src/diagnostics.rs b/crates/hir_ty/src/diagnostics_sink.rs index bf0b85ce9..084fa8b06 100644 --- a/crates/hir_expand/src/diagnostics.rs +++ b/crates/hir_ty/src/diagnostics_sink.rs | |||
@@ -16,10 +16,9 @@ | |||
16 | 16 | ||
17 | use std::{any::Any, fmt}; | 17 | use std::{any::Any, fmt}; |
18 | 18 | ||
19 | use hir_expand::InFile; | ||
19 | use syntax::SyntaxNodePtr; | 20 | use syntax::SyntaxNodePtr; |
20 | 21 | ||
21 | use crate::InFile; | ||
22 | |||
23 | #[derive(Copy, Clone, Debug, PartialEq)] | 22 | #[derive(Copy, Clone, Debug, PartialEq)] |
24 | pub struct DiagnosticCode(pub &'static str); | 23 | pub struct DiagnosticCode(pub &'static str); |
25 | 24 | ||
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs index 7bbd1a1f7..44f843bf3 100644 --- a/crates/hir_ty/src/display.rs +++ b/crates/hir_ty/src/display.rs | |||
@@ -13,6 +13,7 @@ use hir_def::{ | |||
13 | db::DefDatabase, | 13 | db::DefDatabase, |
14 | find_path, | 14 | find_path, |
15 | generics::TypeParamProvenance, | 15 | generics::TypeParamProvenance, |
16 | intern::{Internable, Interned}, | ||
16 | item_scope::ItemInNs, | 17 | item_scope::ItemInNs, |
17 | path::{Path, PathKind}, | 18 | path::{Path, PathKind}, |
18 | type_ref::{TypeBound, TypeRef}, | 19 | type_ref::{TypeBound, TypeRef}, |
@@ -256,6 +257,12 @@ impl<T: HirDisplay> HirDisplay for &'_ T { | |||
256 | } | 257 | } |
257 | } | 258 | } |
258 | 259 | ||
260 | impl<T: HirDisplay + Internable> HirDisplay for Interned<T> { | ||
261 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
262 | HirDisplay::hir_fmt(self.as_ref(), f) | ||
263 | } | ||
264 | } | ||
265 | |||
259 | impl HirDisplay for ProjectionTy { | 266 | impl HirDisplay for ProjectionTy { |
260 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | 267 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { |
261 | if f.should_truncate() { | 268 | if f.should_truncate() { |
@@ -771,8 +778,10 @@ fn write_bounds_like_dyn_trait( | |||
771 | } | 778 | } |
772 | WhereClause::AliasEq(alias_eq) if is_fn_trait => { | 779 | WhereClause::AliasEq(alias_eq) if is_fn_trait => { |
773 | is_fn_trait = false; | 780 | is_fn_trait = false; |
774 | write!(f, " -> ")?; | 781 | if !alias_eq.ty.is_unit() { |
775 | alias_eq.ty.hir_fmt(f)?; | 782 | write!(f, " -> ")?; |
783 | alias_eq.ty.hir_fmt(f)?; | ||
784 | } | ||
776 | } | 785 | } |
777 | WhereClause::AliasEq(AliasEq { ty, alias }) => { | 786 | WhereClause::AliasEq(AliasEq { ty, alias }) => { |
778 | // in types in actual Rust, these will always come | 787 | // in types in actual Rust, these will always come |
diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs index db3c937ff..7a4268819 100644 --- a/crates/hir_ty/src/infer.rs +++ b/crates/hir_ty/src/infer.rs | |||
@@ -28,13 +28,14 @@ use hir_def::{ | |||
28 | AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup, | 28 | AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup, |
29 | TraitId, TypeAliasId, VariantId, | 29 | TraitId, TypeAliasId, VariantId, |
30 | }; | 30 | }; |
31 | use hir_expand::{diagnostics::DiagnosticSink, name::name}; | 31 | use hir_expand::name::name; |
32 | use la_arena::ArenaMap; | 32 | use la_arena::ArenaMap; |
33 | use rustc_hash::FxHashMap; | 33 | use rustc_hash::FxHashMap; |
34 | use stdx::impl_from; | 34 | use stdx::impl_from; |
35 | use syntax::SmolStr; | 35 | use syntax::SmolStr; |
36 | 36 | ||
37 | use super::{DomainGoal, InEnvironment, ProjectionTy, TraitEnvironment, TraitRef, Ty}; | 37 | use super::{DomainGoal, InEnvironment, ProjectionTy, TraitEnvironment, TraitRef, Ty}; |
38 | use crate::diagnostics_sink::DiagnosticSink; | ||
38 | use crate::{ | 39 | use crate::{ |
39 | db::HirDatabase, fold_tys, infer::diagnostics::InferenceDiagnostic, | 40 | db::HirDatabase, fold_tys, infer::diagnostics::InferenceDiagnostic, |
40 | lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Goal, Interner, Substitution, | 41 | lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Goal, Interner, Substitution, |
@@ -557,7 +558,13 @@ impl<'a> InferenceContext<'a> { | |||
557 | 558 | ||
558 | self.infer_pat(*pat, &ty, BindingMode::default()); | 559 | self.infer_pat(*pat, &ty, BindingMode::default()); |
559 | } | 560 | } |
560 | let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT | 561 | let error_ty = &TypeRef::Error; |
562 | let return_ty = if data.is_async() { | ||
563 | data.async_ret_type.as_deref().unwrap_or(error_ty) | ||
564 | } else { | ||
565 | &*data.ret_type | ||
566 | }; | ||
567 | let return_ty = self.make_ty_with_mode(return_ty, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT | ||
561 | self.return_ty = return_ty; | 568 | self.return_ty = return_ty; |
562 | } | 569 | } |
563 | 570 | ||
@@ -578,9 +585,14 @@ impl<'a> InferenceContext<'a> { | |||
578 | } | 585 | } |
579 | 586 | ||
580 | fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> { | 587 | fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> { |
588 | // FIXME resolve via lang_item once try v2 is stable | ||
581 | let path = path![core::ops::Try]; | 589 | let path = path![core::ops::Try]; |
582 | let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; | 590 | let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; |
583 | self.db.trait_data(trait_).associated_type_by_name(&name![Ok]) | 591 | let trait_data = self.db.trait_data(trait_); |
592 | trait_data | ||
593 | // FIXME remove once try v2 is stable | ||
594 | .associated_type_by_name(&name![Ok]) | ||
595 | .or_else(|| trait_data.associated_type_by_name(&name![Output])) | ||
584 | } | 596 | } |
585 | 597 | ||
586 | fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> { | 598 | fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> { |
@@ -793,11 +805,11 @@ impl std::ops::BitOrAssign for Diverges { | |||
793 | 805 | ||
794 | mod diagnostics { | 806 | mod diagnostics { |
795 | use hir_def::{expr::ExprId, DefWithBodyId}; | 807 | use hir_def::{expr::ExprId, DefWithBodyId}; |
796 | use hir_expand::diagnostics::DiagnosticSink; | ||
797 | 808 | ||
798 | use crate::{ | 809 | use crate::{ |
799 | db::HirDatabase, | 810 | db::HirDatabase, |
800 | diagnostics::{BreakOutsideOfLoop, NoSuchField}, | 811 | diagnostics::{BreakOutsideOfLoop, NoSuchField}, |
812 | diagnostics_sink::DiagnosticSink, | ||
801 | }; | 813 | }; |
802 | 814 | ||
803 | #[derive(Debug, PartialEq, Eq, Clone)] | 815 | #[derive(Debug, PartialEq, Eq, Clone)] |
diff --git a/crates/hir_ty/src/infer/coerce.rs b/crates/hir_ty/src/infer/coerce.rs index 765a02b1c..03b97e7db 100644 --- a/crates/hir_ty/src/infer/coerce.rs +++ b/crates/hir_ty/src/infer/coerce.rs | |||
@@ -76,17 +76,17 @@ impl<'a> InferenceContext<'a> { | |||
76 | // way around first would mean we make the type variable `!`, instead of | 76 | // way around first would mean we make the type variable `!`, instead of |
77 | // just marking it as possibly diverging. | 77 | // just marking it as possibly diverging. |
78 | if self.coerce(&ty2, &ty1) { | 78 | if self.coerce(&ty2, &ty1) { |
79 | ty1.clone() | 79 | ty1 |
80 | } else if self.coerce(&ty1, &ty2) { | 80 | } else if self.coerce(&ty1, &ty2) { |
81 | ty2.clone() | 81 | ty2 |
82 | } else { | 82 | } else { |
83 | if let Some(id) = id { | 83 | if let Some(id) = id { |
84 | self.result | 84 | self.result |
85 | .type_mismatches | 85 | .type_mismatches |
86 | .insert(id.into(), TypeMismatch { expected: ty1.clone(), actual: ty2.clone() }); | 86 | .insert(id.into(), TypeMismatch { expected: ty1.clone(), actual: ty2 }); |
87 | } | 87 | } |
88 | cov_mark::hit!(coerce_merge_fail_fallback); | 88 | cov_mark::hit!(coerce_merge_fail_fallback); |
89 | ty1.clone() | 89 | ty1 |
90 | } | 90 | } |
91 | } | 91 | } |
92 | 92 | ||
@@ -183,7 +183,7 @@ impl<'a> InferenceContext<'a> { | |||
183 | // details of coercion errors though, so I think it's useful to leave | 183 | // details of coercion errors though, so I think it's useful to leave |
184 | // the structure like it is. | 184 | // the structure like it is. |
185 | 185 | ||
186 | let canonicalized = self.canonicalize(from_ty.clone()); | 186 | let canonicalized = self.canonicalize(from_ty); |
187 | let autoderef = autoderef::autoderef( | 187 | let autoderef = autoderef::autoderef( |
188 | self.db, | 188 | self.db, |
189 | self.resolver.krate(), | 189 | self.resolver.krate(), |
@@ -389,7 +389,7 @@ impl<'a> InferenceContext<'a> { | |||
389 | // The CoerceUnsized trait should have two generic params: Self and T. | 389 | // The CoerceUnsized trait should have two generic params: Self and T. |
390 | return Err(TypeError); | 390 | return Err(TypeError); |
391 | } | 391 | } |
392 | b.push(coerce_from.clone()).push(to_ty.clone()).build() | 392 | b.push(coerce_from).push(to_ty.clone()).build() |
393 | }; | 393 | }; |
394 | 394 | ||
395 | let goal: InEnvironment<DomainGoal> = | 395 | let goal: InEnvironment<DomainGoal> = |
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index eab8fac91..97507305c 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs | |||
@@ -44,7 +44,7 @@ impl<'a> InferenceContext<'a> { | |||
44 | if !could_unify { | 44 | if !could_unify { |
45 | self.result.type_mismatches.insert( | 45 | self.result.type_mismatches.insert( |
46 | tgt_expr.into(), | 46 | tgt_expr.into(), |
47 | TypeMismatch { expected: expected_ty.clone(), actual: ty.clone() }, | 47 | TypeMismatch { expected: expected_ty, actual: ty.clone() }, |
48 | ); | 48 | ); |
49 | } | 49 | } |
50 | } | 50 | } |
@@ -57,15 +57,14 @@ impl<'a> InferenceContext<'a> { | |||
57 | let ty = self.infer_expr_inner(expr, &expected); | 57 | let ty = self.infer_expr_inner(expr, &expected); |
58 | let ty = if let Some(target) = expected.only_has_type(&mut self.table) { | 58 | let ty = if let Some(target) = expected.only_has_type(&mut self.table) { |
59 | if !self.coerce(&ty, &target) { | 59 | if !self.coerce(&ty, &target) { |
60 | self.result.type_mismatches.insert( | 60 | self.result |
61 | expr.into(), | 61 | .type_mismatches |
62 | TypeMismatch { expected: target.clone(), actual: ty.clone() }, | 62 | .insert(expr.into(), TypeMismatch { expected: target, actual: ty.clone() }); |
63 | ); | ||
64 | // Return actual type when type mismatch. | 63 | // Return actual type when type mismatch. |
65 | // This is needed for diagnostic when return type mismatch. | 64 | // This is needed for diagnostic when return type mismatch. |
66 | ty | 65 | ty |
67 | } else { | 66 | } else { |
68 | target.clone() | 67 | target |
69 | } | 68 | } |
70 | } else { | 69 | } else { |
71 | ty | 70 | ty |
@@ -805,7 +804,7 @@ impl<'a> InferenceContext<'a> { | |||
805 | None => self.table.new_float_var(), | 804 | None => self.table.new_float_var(), |
806 | }, | 805 | }, |
807 | }, | 806 | }, |
808 | Expr::MacroStmts { tail } => self.infer_expr(*tail, expected), | 807 | Expr::MacroStmts { tail } => self.infer_expr_inner(*tail, expected), |
809 | }; | 808 | }; |
810 | // use a new type variable if we got unknown here | 809 | // use a new type variable if we got unknown here |
811 | let ty = self.insert_type_vars_shallow(ty); | 810 | let ty = self.insert_type_vars_shallow(ty); |
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs index 9c8e3b6ae..83e0a7a9e 100644 --- a/crates/hir_ty/src/infer/pat.rs +++ b/crates/hir_ty/src/infer/pat.rs | |||
@@ -196,7 +196,7 @@ impl<'a> InferenceContext<'a> { | |||
196 | let inner_ty = if let Some(subpat) = subpat { | 196 | let inner_ty = if let Some(subpat) = subpat { |
197 | self.infer_pat(*subpat, &expected, default_bm) | 197 | self.infer_pat(*subpat, &expected, default_bm) |
198 | } else { | 198 | } else { |
199 | expected.clone() | 199 | expected |
200 | }; | 200 | }; |
201 | let inner_ty = self.insert_type_vars_shallow(inner_ty); | 201 | let inner_ty = self.insert_type_vars_shallow(inner_ty); |
202 | 202 | ||
@@ -266,10 +266,9 @@ impl<'a> InferenceContext<'a> { | |||
266 | // use a new type variable if we got error type here | 266 | // use a new type variable if we got error type here |
267 | let ty = self.insert_type_vars_shallow(ty); | 267 | let ty = self.insert_type_vars_shallow(ty); |
268 | if !self.unify(&ty, &expected) { | 268 | if !self.unify(&ty, &expected) { |
269 | self.result.type_mismatches.insert( | 269 | self.result |
270 | pat.into(), | 270 | .type_mismatches |
271 | TypeMismatch { expected: expected.clone(), actual: ty.clone() }, | 271 | .insert(pat.into(), TypeMismatch { expected: expected, actual: ty.clone() }); |
272 | ); | ||
273 | } | 272 | } |
274 | self.write_pat_ty(pat, ty.clone()); | 273 | self.write_pat_ty(pat, ty.clone()); |
275 | ty | 274 | ty |
diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs index ef021978a..50e0d6333 100644 --- a/crates/hir_ty/src/lib.rs +++ b/crates/hir_ty/src/lib.rs | |||
@@ -21,6 +21,7 @@ mod utils; | |||
21 | mod walk; | 21 | mod walk; |
22 | pub mod db; | 22 | pub mod db; |
23 | pub mod diagnostics; | 23 | pub mod diagnostics; |
24 | pub mod diagnostics_sink; | ||
24 | pub mod display; | 25 | pub mod display; |
25 | pub mod method_resolution; | 26 | pub mod method_resolution; |
26 | pub mod primitive; | 27 | pub mod primitive; |
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs index 8a375b973..c83933c73 100644 --- a/crates/hir_ty/src/lower.rs +++ b/crates/hir_ty/src/lower.rs | |||
@@ -10,6 +10,7 @@ use std::{iter, sync::Arc}; | |||
10 | 10 | ||
11 | use base_db::CrateId; | 11 | use base_db::CrateId; |
12 | use chalk_ir::{cast::Cast, fold::Shift, interner::HasInterner, Mutability, Safety}; | 12 | use chalk_ir::{cast::Cast, fold::Shift, interner::HasInterner, Mutability, Safety}; |
13 | use hir_def::intern::Interned; | ||
13 | use hir_def::{ | 14 | use hir_def::{ |
14 | adt::StructKind, | 15 | adt::StructKind, |
15 | body::{Expander, LowerCtx}, | 16 | body::{Expander, LowerCtx}, |
@@ -843,7 +844,7 @@ impl<'a> TyLoweringContext<'a> { | |||
843 | }) | 844 | }) |
844 | } | 845 | } |
845 | 846 | ||
846 | fn lower_impl_trait(&self, bounds: &[TypeBound]) -> ReturnTypeImplTrait { | 847 | fn lower_impl_trait(&self, bounds: &[Interned<TypeBound>]) -> ReturnTypeImplTrait { |
847 | cov_mark::hit!(lower_rpit); | 848 | cov_mark::hit!(lower_rpit); |
848 | let self_ty = | 849 | let self_ty = |
849 | TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(&Interner); | 850 | TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(&Interner); |
@@ -1025,7 +1026,7 @@ pub(crate) fn trait_environment_query( | |||
1025 | }; | 1026 | }; |
1026 | if let Some(AssocContainerId::TraitId(trait_id)) = container { | 1027 | if let Some(AssocContainerId::TraitId(trait_id)) = container { |
1027 | // add `Self: Trait<T1, T2, ...>` to the environment in trait | 1028 | // add `Self: Trait<T1, T2, ...>` to the environment in trait |
1028 | // function default implementations (and hypothetical code | 1029 | // function default implementations (and speculative code |
1029 | // inside consts or type aliases) | 1030 | // inside consts or type aliases) |
1030 | cov_mark::hit!(trait_self_implements_self); | 1031 | cov_mark::hit!(trait_self_implements_self); |
1031 | let substs = TyBuilder::type_params_subst(db, trait_id); | 1032 | let substs = TyBuilder::type_params_subst(db, trait_id); |
diff --git a/crates/hir_ty/src/tests.rs b/crates/hir_ty/src/tests.rs index cc819373c..9d726b024 100644 --- a/crates/hir_ty/src/tests.rs +++ b/crates/hir_ty/src/tests.rs | |||
@@ -7,6 +7,7 @@ mod traits; | |||
7 | mod method_resolution; | 7 | mod method_resolution; |
8 | mod macros; | 8 | mod macros; |
9 | mod display_source_code; | 9 | mod display_source_code; |
10 | mod incremental; | ||
10 | 11 | ||
11 | use std::{env, sync::Arc}; | 12 | use std::{env, sync::Arc}; |
12 | 13 | ||
@@ -317,50 +318,6 @@ fn ellipsize(mut text: String, max_len: usize) -> String { | |||
317 | text | 318 | text |
318 | } | 319 | } |
319 | 320 | ||
320 | #[test] | ||
321 | fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | ||
322 | let (mut db, pos) = TestDB::with_position( | ||
323 | " | ||
324 | //- /lib.rs | ||
325 | fn foo() -> i32 { | ||
326 | $01 + 1 | ||
327 | } | ||
328 | ", | ||
329 | ); | ||
330 | { | ||
331 | let events = db.log_executed(|| { | ||
332 | let module = db.module_for_file(pos.file_id); | ||
333 | let crate_def_map = module.def_map(&db); | ||
334 | visit_module(&db, &crate_def_map, module.local_id, &mut |def| { | ||
335 | db.infer(def); | ||
336 | }); | ||
337 | }); | ||
338 | assert!(format!("{:?}", events).contains("infer")) | ||
339 | } | ||
340 | |||
341 | let new_text = " | ||
342 | fn foo() -> i32 { | ||
343 | 1 | ||
344 | + | ||
345 | 1 | ||
346 | } | ||
347 | " | ||
348 | .to_string(); | ||
349 | |||
350 | db.set_file_text(pos.file_id, Arc::new(new_text)); | ||
351 | |||
352 | { | ||
353 | let events = db.log_executed(|| { | ||
354 | let module = db.module_for_file(pos.file_id); | ||
355 | let crate_def_map = module.def_map(&db); | ||
356 | visit_module(&db, &crate_def_map, module.local_id, &mut |def| { | ||
357 | db.infer(def); | ||
358 | }); | ||
359 | }); | ||
360 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) | ||
361 | } | ||
362 | } | ||
363 | |||
364 | fn check_infer(ra_fixture: &str, expect: Expect) { | 321 | fn check_infer(ra_fixture: &str, expect: Expect) { |
365 | let mut actual = infer(ra_fixture); | 322 | let mut actual = infer(ra_fixture); |
366 | actual.push('\n'); | 323 | actual.push('\n'); |
diff --git a/crates/hir_ty/src/tests/coercion.rs b/crates/hir_ty/src/tests/coercion.rs index bb568ea37..6dac7e103 100644 --- a/crates/hir_ty/src/tests/coercion.rs +++ b/crates/hir_ty/src/tests/coercion.rs | |||
@@ -832,11 +832,9 @@ fn coerce_unsize_super_trait_cycle() { | |||
832 | ); | 832 | ); |
833 | } | 833 | } |
834 | 834 | ||
835 | #[ignore] | ||
836 | #[test] | 835 | #[test] |
837 | fn coerce_unsize_generic() { | 836 | fn coerce_unsize_generic() { |
838 | // FIXME: Implement this | 837 | // FIXME: fix the type mismatches here |
839 | // https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions | ||
840 | check_infer_with_mismatches( | 838 | check_infer_with_mismatches( |
841 | r#" | 839 | r#" |
842 | #[lang = "unsize"] | 840 | #[lang = "unsize"] |
@@ -854,8 +852,58 @@ fn coerce_unsize_generic() { | |||
854 | let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); | 852 | let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); |
855 | } | 853 | } |
856 | "#, | 854 | "#, |
857 | expect![[r" | 855 | expect![[r#" |
858 | "]], | 856 | 209..317 '{ ... }); }': () |
857 | 219..220 '_': &Foo<[usize]> | ||
858 | 238..259 '&Foo {..., 3] }': &Foo<[usize]> | ||
859 | 239..259 'Foo { ..., 3] }': Foo<[usize]> | ||
860 | 248..257 '[1, 2, 3]': [usize; 3] | ||
861 | 249..250 '1': usize | ||
862 | 252..253 '2': usize | ||
863 | 255..256 '3': usize | ||
864 | 269..270 '_': &Bar<[usize]> | ||
865 | 288..314 '&Bar(F... 3] })': &Bar<[i32; 3]> | ||
866 | 289..292 'Bar': Bar<[i32; 3]>(Foo<[i32; 3]>) -> Bar<[i32; 3]> | ||
867 | 289..314 'Bar(Fo... 3] })': Bar<[i32; 3]> | ||
868 | 293..313 'Foo { ..., 3] }': Foo<[i32; 3]> | ||
869 | 302..311 '[1, 2, 3]': [i32; 3] | ||
870 | 303..304 '1': i32 | ||
871 | 306..307 '2': i32 | ||
872 | 309..310 '3': i32 | ||
873 | 248..257: expected [usize], got [usize; 3] | ||
874 | 288..314: expected &Bar<[usize]>, got &Bar<[i32; 3]> | ||
875 | "#]], | ||
876 | ); | ||
877 | } | ||
878 | |||
879 | #[test] | ||
880 | fn coerce_unsize_apit() { | ||
881 | // FIXME: #8984 | ||
882 | check_infer_with_mismatches( | ||
883 | r#" | ||
884 | #[lang = "sized"] | ||
885 | pub trait Sized {} | ||
886 | #[lang = "unsize"] | ||
887 | pub trait Unsize<T> {} | ||
888 | #[lang = "coerce_unsized"] | ||
889 | pub trait CoerceUnsized<T> {} | ||
890 | |||
891 | impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {} | ||
892 | |||
893 | trait Foo {} | ||
894 | |||
895 | fn test(f: impl Foo) { | ||
896 | let _: &dyn Foo = &f; | ||
897 | } | ||
898 | "#, | ||
899 | expect![[r#" | ||
900 | 210..211 'f': impl Foo | ||
901 | 223..252 '{ ... &f; }': () | ||
902 | 233..234 '_': &dyn Foo | ||
903 | 247..249 '&f': &impl Foo | ||
904 | 248..249 'f': impl Foo | ||
905 | 247..249: expected &dyn Foo, got &impl Foo | ||
906 | "#]], | ||
859 | ); | 907 | ); |
860 | } | 908 | } |
861 | 909 | ||
@@ -912,3 +960,46 @@ fn test() -> i32 { | |||
912 | "#, | 960 | "#, |
913 | ) | 961 | ) |
914 | } | 962 | } |
963 | |||
964 | #[test] | ||
965 | fn panic_macro() { | ||
966 | check_infer_with_mismatches( | ||
967 | r#" | ||
968 | mod panic { | ||
969 | #[macro_export] | ||
970 | pub macro panic_2015 { | ||
971 | () => ( | ||
972 | $crate::panicking::panic() | ||
973 | ), | ||
974 | } | ||
975 | } | ||
976 | |||
977 | mod panicking { | ||
978 | pub fn panic() -> ! { loop {} } | ||
979 | } | ||
980 | |||
981 | #[rustc_builtin_macro = "core_panic"] | ||
982 | macro_rules! panic { | ||
983 | // Expands to either `$crate::panic::panic_2015` or `$crate::panic::panic_2021` | ||
984 | // depending on the edition of the caller. | ||
985 | ($($arg:tt)*) => { | ||
986 | /* compiler built-in */ | ||
987 | }; | ||
988 | } | ||
989 | |||
990 | fn main() { | ||
991 | panic!() | ||
992 | } | ||
993 | "#, | ||
994 | expect![[r#" | ||
995 | 174..185 '{ loop {} }': ! | ||
996 | 176..183 'loop {}': ! | ||
997 | 181..183 '{}': () | ||
998 | !0..24 '$crate...:panic': fn panic() -> ! | ||
999 | !0..26 '$crate...anic()': ! | ||
1000 | !0..26 '$crate...anic()': ! | ||
1001 | !0..28 '$crate...015!()': ! | ||
1002 | 454..470 '{ ...c!() }': () | ||
1003 | "#]], | ||
1004 | ); | ||
1005 | } | ||
diff --git a/crates/hir_ty/src/tests/incremental.rs b/crates/hir_ty/src/tests/incremental.rs new file mode 100644 index 000000000..3e08e83e8 --- /dev/null +++ b/crates/hir_ty/src/tests/incremental.rs | |||
@@ -0,0 +1,51 @@ | |||
1 | use std::sync::Arc; | ||
2 | |||
3 | use base_db::{fixture::WithFixture, SourceDatabaseExt}; | ||
4 | |||
5 | use crate::{db::HirDatabase, test_db::TestDB}; | ||
6 | |||
7 | use super::visit_module; | ||
8 | |||
9 | #[test] | ||
10 | fn typing_whitespace_inside_a_function_should_not_invalidate_types() { | ||
11 | let (mut db, pos) = TestDB::with_position( | ||
12 | " | ||
13 | //- /lib.rs | ||
14 | fn foo() -> i32 { | ||
15 | $01 + 1 | ||
16 | } | ||
17 | ", | ||
18 | ); | ||
19 | { | ||
20 | let events = db.log_executed(|| { | ||
21 | let module = db.module_for_file(pos.file_id); | ||
22 | let crate_def_map = module.def_map(&db); | ||
23 | visit_module(&db, &crate_def_map, module.local_id, &mut |def| { | ||
24 | db.infer(def); | ||
25 | }); | ||
26 | }); | ||
27 | assert!(format!("{:?}", events).contains("infer")) | ||
28 | } | ||
29 | |||
30 | let new_text = " | ||
31 | fn foo() -> i32 { | ||
32 | 1 | ||
33 | + | ||
34 | 1 | ||
35 | } | ||
36 | " | ||
37 | .to_string(); | ||
38 | |||
39 | db.set_file_text(pos.file_id, Arc::new(new_text)); | ||
40 | |||
41 | { | ||
42 | let events = db.log_executed(|| { | ||
43 | let module = db.module_for_file(pos.file_id); | ||
44 | let crate_def_map = module.def_map(&db); | ||
45 | visit_module(&db, &crate_def_map, module.local_id, &mut |def| { | ||
46 | db.infer(def); | ||
47 | }); | ||
48 | }); | ||
49 | assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) | ||
50 | } | ||
51 | } | ||
diff --git a/crates/hir_ty/src/tests/macros.rs b/crates/hir_ty/src/tests/macros.rs index 6588aa46c..7647bb08b 100644 --- a/crates/hir_ty/src/tests/macros.rs +++ b/crates/hir_ty/src/tests/macros.rs | |||
@@ -752,6 +752,24 @@ fn bar() -> u32 {0} | |||
752 | } | 752 | } |
753 | 753 | ||
754 | #[test] | 754 | #[test] |
755 | fn infer_builtin_macros_include_expression() { | ||
756 | check_types( | ||
757 | r#" | ||
758 | //- /main.rs | ||
759 | #[rustc_builtin_macro] | ||
760 | macro_rules! include {() => {}} | ||
761 | fn main() { | ||
762 | let i = include!("bla.rs"); | ||
763 | i; | ||
764 | //^ i32 | ||
765 | } | ||
766 | //- /bla.rs | ||
767 | 0 | ||
768 | "#, | ||
769 | ) | ||
770 | } | ||
771 | |||
772 | #[test] | ||
755 | fn infer_builtin_macros_include_child_mod() { | 773 | fn infer_builtin_macros_include_child_mod() { |
756 | check_types( | 774 | check_types( |
757 | r#" | 775 | r#" |
diff --git a/crates/hir_ty/src/tests/traits.rs b/crates/hir_ty/src/tests/traits.rs index a5a2df54c..7c0ff2170 100644 --- a/crates/hir_ty/src/tests/traits.rs +++ b/crates/hir_ty/src/tests/traits.rs | |||
@@ -161,6 +161,43 @@ mod result { | |||
161 | } | 161 | } |
162 | 162 | ||
163 | #[test] | 163 | #[test] |
164 | fn infer_tryv2() { | ||
165 | check_types( | ||
166 | r#" | ||
167 | //- /main.rs crate:main deps:core | ||
168 | fn test() { | ||
169 | let r: Result<i32, u64> = Result::Ok(1); | ||
170 | let v = r?; | ||
171 | v; | ||
172 | } //^ i32 | ||
173 | |||
174 | //- /core.rs crate:core | ||
175 | #[prelude_import] use ops::*; | ||
176 | mod ops { | ||
177 | trait Try { | ||
178 | type Output; | ||
179 | type Residual; | ||
180 | } | ||
181 | } | ||
182 | |||
183 | #[prelude_import] use result::*; | ||
184 | mod result { | ||
185 | enum Infallible {} | ||
186 | enum Result<O, E> { | ||
187 | Ok(O), | ||
188 | Err(E) | ||
189 | } | ||
190 | |||
191 | impl<O, E> crate::ops::Try for Result<O, E> { | ||
192 | type Output = O; | ||
193 | type Error = Result<Infallible, E>; | ||
194 | } | ||
195 | } | ||
196 | "#, | ||
197 | ); | ||
198 | } | ||
199 | |||
200 | #[test] | ||
164 | fn infer_for_loop() { | 201 | fn infer_for_loop() { |
165 | check_types( | 202 | check_types( |
166 | r#" | 203 | r#" |
@@ -3041,7 +3078,7 @@ fn infer_fn_trait_arg() { | |||
3041 | 3078 | ||
3042 | #[test] | 3079 | #[test] |
3043 | fn infer_box_fn_arg() { | 3080 | fn infer_box_fn_arg() { |
3044 | // The type mismatch is a bug | 3081 | // The type mismatch is because we don't define Unsize and CoerceUnsized |
3045 | check_infer_with_mismatches( | 3082 | check_infer_with_mismatches( |
3046 | r#" | 3083 | r#" |
3047 | //- /lib.rs deps:std | 3084 | //- /lib.rs deps:std |
@@ -3101,7 +3138,7 @@ fn foo() { | |||
3101 | 555..557 'ps': {unknown} | 3138 | 555..557 'ps': {unknown} |
3102 | 559..561 '{}': () | 3139 | 559..561 '{}': () |
3103 | 568..569 'f': Box<dyn FnOnce(&Option<i32>)> | 3140 | 568..569 'f': Box<dyn FnOnce(&Option<i32>)> |
3104 | 568..573 'f(&s)': FnOnce::Output<dyn FnOnce(&Option<i32>), (&Option<i32>,)> | 3141 | 568..573 'f(&s)': () |
3105 | 570..572 '&s': &Option<i32> | 3142 | 570..572 '&s': &Option<i32> |
3106 | 571..572 's': Option<i32> | 3143 | 571..572 's': Option<i32> |
3107 | 549..562: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|{unknown}| -> ()> | 3144 | 549..562: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|{unknown}| -> ()> |
@@ -3571,3 +3608,104 @@ fn main() { | |||
3571 | "#]], | 3608 | "#]], |
3572 | ) | 3609 | ) |
3573 | } | 3610 | } |
3611 | |||
3612 | #[test] | ||
3613 | fn fn_returning_unit() { | ||
3614 | check_infer_with_mismatches( | ||
3615 | r#" | ||
3616 | #[lang = "fn_once"] | ||
3617 | trait FnOnce<Args> { | ||
3618 | type Output; | ||
3619 | } | ||
3620 | |||
3621 | fn test<F: FnOnce()>(f: F) { | ||
3622 | let _: () = f(); | ||
3623 | }"#, | ||
3624 | expect![[r#" | ||
3625 | 82..83 'f': F | ||
3626 | 88..112 '{ ...f(); }': () | ||
3627 | 98..99 '_': () | ||
3628 | 106..107 'f': F | ||
3629 | 106..109 'f()': () | ||
3630 | "#]], | ||
3631 | ); | ||
3632 | } | ||
3633 | |||
3634 | #[test] | ||
3635 | fn trait_in_scope_of_trait_impl() { | ||
3636 | check_infer( | ||
3637 | r#" | ||
3638 | mod foo { | ||
3639 | pub trait Foo { | ||
3640 | fn foo(self); | ||
3641 | fn bar(self) -> usize { 0 } | ||
3642 | } | ||
3643 | } | ||
3644 | impl foo::Foo for u32 { | ||
3645 | fn foo(self) { | ||
3646 | let _x = self.bar(); | ||
3647 | } | ||
3648 | } | ||
3649 | "#, | ||
3650 | expect![[r#" | ||
3651 | 45..49 'self': Self | ||
3652 | 67..71 'self': Self | ||
3653 | 82..87 '{ 0 }': usize | ||
3654 | 84..85 '0': usize | ||
3655 | 131..135 'self': u32 | ||
3656 | 137..173 '{ ... }': () | ||
3657 | 151..153 '_x': usize | ||
3658 | 156..160 'self': u32 | ||
3659 | 156..166 'self.bar()': usize | ||
3660 | "#]], | ||
3661 | ); | ||
3662 | } | ||
3663 | |||
3664 | #[test] | ||
3665 | fn infer_async_ret_type() { | ||
3666 | check_types( | ||
3667 | r#" | ||
3668 | //- /main.rs crate:main deps:core | ||
3669 | |||
3670 | enum Result<T, E> { | ||
3671 | Ok(T), | ||
3672 | Err(E), | ||
3673 | } | ||
3674 | |||
3675 | use Result::*; | ||
3676 | |||
3677 | |||
3678 | struct Fooey; | ||
3679 | |||
3680 | impl Fooey { | ||
3681 | fn collect<B: Convert>(self) -> B { | ||
3682 | B::new() | ||
3683 | } | ||
3684 | } | ||
3685 | |||
3686 | trait Convert { | ||
3687 | fn new() -> Self; | ||
3688 | } | ||
3689 | impl Convert for u32 { | ||
3690 | fn new() -> Self { | ||
3691 | 0 | ||
3692 | } | ||
3693 | } | ||
3694 | |||
3695 | async fn get_accounts() -> Result<u32, ()> { | ||
3696 | let ret = Fooey.collect(); | ||
3697 | // ^ u32 | ||
3698 | Ok(ret) | ||
3699 | } | ||
3700 | |||
3701 | //- /core.rs crate:core | ||
3702 | #[prelude_import] use future::*; | ||
3703 | mod future { | ||
3704 | #[lang = "future_trait"] | ||
3705 | trait Future { | ||
3706 | type Output; | ||
3707 | } | ||
3708 | } | ||
3709 | "#, | ||
3710 | ); | ||
3711 | } | ||
diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index 5ebe7fd0e..8d68dce05 100644 --- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs | |||
@@ -5,7 +5,7 @@ use ide_db::{ | |||
5 | helpers::visit_file_defs, | 5 | helpers::visit_file_defs, |
6 | RootDatabase, | 6 | RootDatabase, |
7 | }; | 7 | }; |
8 | use syntax::{ast::NameOwner, AstNode, TextRange, TextSize}; | 8 | use syntax::{ast::NameOwner, AstNode, TextRange}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | fn_references::find_all_methods, | 11 | fn_references::find_all_methods, |
@@ -58,7 +58,7 @@ pub(crate) fn annotations( | |||
58 | } | 58 | } |
59 | 59 | ||
60 | let action = runnable.action(); | 60 | let action = runnable.action(); |
61 | let range = runnable.nav.full_range; | 61 | let range = runnable.nav.focus_or_full_range(); |
62 | 62 | ||
63 | if config.run { | 63 | if config.run { |
64 | annotations.push(Annotation { | 64 | annotations.push(Annotation { |
@@ -80,26 +80,26 @@ pub(crate) fn annotations( | |||
80 | 80 | ||
81 | visit_file_defs(&Semantics::new(db), file_id, &mut |def| match def { | 81 | visit_file_defs(&Semantics::new(db), file_id, &mut |def| match def { |
82 | Either::Left(def) => { | 82 | Either::Left(def) => { |
83 | let node = match def { | 83 | let range = match def { |
84 | hir::ModuleDef::Const(konst) => { | 84 | hir::ModuleDef::Const(konst) => { |
85 | konst.source(db).and_then(|node| range_and_position_of(&node, file_id)) | 85 | konst.source(db).and_then(|node| name_range(&node, file_id)) |
86 | } | 86 | } |
87 | hir::ModuleDef::Trait(trait_) => { | 87 | hir::ModuleDef::Trait(trait_) => { |
88 | trait_.source(db).and_then(|node| range_and_position_of(&node, file_id)) | 88 | trait_.source(db).and_then(|node| name_range(&node, file_id)) |
89 | } | 89 | } |
90 | hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => { | 90 | hir::ModuleDef::Adt(hir::Adt::Struct(strukt)) => { |
91 | strukt.source(db).and_then(|node| range_and_position_of(&node, file_id)) | 91 | strukt.source(db).and_then(|node| name_range(&node, file_id)) |
92 | } | 92 | } |
93 | hir::ModuleDef::Adt(hir::Adt::Enum(enum_)) => { | 93 | hir::ModuleDef::Adt(hir::Adt::Enum(enum_)) => { |
94 | enum_.source(db).and_then(|node| range_and_position_of(&node, file_id)) | 94 | enum_.source(db).and_then(|node| name_range(&node, file_id)) |
95 | } | 95 | } |
96 | hir::ModuleDef::Adt(hir::Adt::Union(union)) => { | 96 | hir::ModuleDef::Adt(hir::Adt::Union(union)) => { |
97 | union.source(db).and_then(|node| range_and_position_of(&node, file_id)) | 97 | union.source(db).and_then(|node| name_range(&node, file_id)) |
98 | } | 98 | } |
99 | _ => None, | 99 | _ => None, |
100 | }; | 100 | }; |
101 | let (offset, range) = match node { | 101 | let (range, offset) = match range { |
102 | Some(node) => node, | 102 | Some(range) => (range, range.start()), |
103 | None => return, | 103 | None => return, |
104 | }; | 104 | }; |
105 | 105 | ||
@@ -122,18 +122,12 @@ pub(crate) fn annotations( | |||
122 | }); | 122 | }); |
123 | } | 123 | } |
124 | 124 | ||
125 | fn range_and_position_of<T: NameOwner>( | 125 | fn name_range<T: NameOwner>(node: &InFile<T>, file_id: FileId) -> Option<TextRange> { |
126 | node: &InFile<T>, | 126 | if node.file_id == file_id.into() { |
127 | file_id: FileId, | 127 | node.value.name().map(|it| it.syntax().text_range()) |
128 | ) -> Option<(TextSize, TextRange)> { | 128 | } else { |
129 | if node.file_id != file_id.into() { | ||
130 | // Node is outside the file we are adding annotations to (e.g. macros). | 129 | // Node is outside the file we are adding annotations to (e.g. macros). |
131 | None | 130 | None |
132 | } else { | ||
133 | Some(( | ||
134 | node.value.name()?.syntax().text_range().start(), | ||
135 | node.value.syntax().text_range(), | ||
136 | )) | ||
137 | } | 131 | } |
138 | } | 132 | } |
139 | } | 133 | } |
@@ -141,13 +135,15 @@ pub(crate) fn annotations( | |||
141 | }); | 135 | }); |
142 | 136 | ||
143 | if config.annotate_method_references { | 137 | if config.annotate_method_references { |
144 | annotations.extend(find_all_methods(db, file_id).into_iter().map(|method| Annotation { | 138 | annotations.extend(find_all_methods(db, file_id).into_iter().map( |
145 | range: method.range, | 139 | |FileRange { file_id, range }| Annotation { |
146 | kind: AnnotationKind::HasReferences { | 140 | range, |
147 | position: FilePosition { file_id, offset: method.range.start() }, | 141 | kind: AnnotationKind::HasReferences { |
148 | data: None, | 142 | position: FilePosition { file_id, offset: range.start() }, |
143 | data: None, | ||
144 | }, | ||
149 | }, | 145 | }, |
150 | })); | 146 | )); |
151 | } | 147 | } |
152 | 148 | ||
153 | annotations | 149 | annotations |
@@ -228,7 +224,7 @@ fn main() { | |||
228 | expect![[r#" | 224 | expect![[r#" |
229 | [ | 225 | [ |
230 | Annotation { | 226 | Annotation { |
231 | range: 50..85, | 227 | range: 53..57, |
232 | kind: Runnable { | 228 | kind: Runnable { |
233 | debug: false, | 229 | debug: false, |
234 | runnable: Runnable { | 230 | runnable: Runnable { |
@@ -247,7 +243,7 @@ fn main() { | |||
247 | }, | 243 | }, |
248 | }, | 244 | }, |
249 | Annotation { | 245 | Annotation { |
250 | range: 50..85, | 246 | range: 53..57, |
251 | kind: Runnable { | 247 | kind: Runnable { |
252 | debug: true, | 248 | debug: true, |
253 | runnable: Runnable { | 249 | runnable: Runnable { |
@@ -266,7 +262,7 @@ fn main() { | |||
266 | }, | 262 | }, |
267 | }, | 263 | }, |
268 | Annotation { | 264 | Annotation { |
269 | range: 0..22, | 265 | range: 6..10, |
270 | kind: HasReferences { | 266 | kind: HasReferences { |
271 | position: FilePosition { | 267 | position: FilePosition { |
272 | file_id: FileId( | 268 | file_id: FileId( |
@@ -287,7 +283,7 @@ fn main() { | |||
287 | }, | 283 | }, |
288 | }, | 284 | }, |
289 | Annotation { | 285 | Annotation { |
290 | range: 24..48, | 286 | range: 30..36, |
291 | kind: HasReferences { | 287 | kind: HasReferences { |
292 | position: FilePosition { | 288 | position: FilePosition { |
293 | file_id: FileId( | 289 | file_id: FileId( |
@@ -332,7 +328,7 @@ fn main() { | |||
332 | expect![[r#" | 328 | expect![[r#" |
333 | [ | 329 | [ |
334 | Annotation { | 330 | Annotation { |
335 | range: 14..48, | 331 | range: 17..21, |
336 | kind: Runnable { | 332 | kind: Runnable { |
337 | debug: false, | 333 | debug: false, |
338 | runnable: Runnable { | 334 | runnable: Runnable { |
@@ -351,7 +347,7 @@ fn main() { | |||
351 | }, | 347 | }, |
352 | }, | 348 | }, |
353 | Annotation { | 349 | Annotation { |
354 | range: 14..48, | 350 | range: 17..21, |
355 | kind: Runnable { | 351 | kind: Runnable { |
356 | debug: true, | 352 | debug: true, |
357 | runnable: Runnable { | 353 | runnable: Runnable { |
@@ -370,7 +366,7 @@ fn main() { | |||
370 | }, | 366 | }, |
371 | }, | 367 | }, |
372 | Annotation { | 368 | Annotation { |
373 | range: 0..12, | 369 | range: 7..11, |
374 | kind: HasImpls { | 370 | kind: HasImpls { |
375 | position: FilePosition { | 371 | position: FilePosition { |
376 | file_id: FileId( | 372 | file_id: FileId( |
@@ -384,7 +380,7 @@ fn main() { | |||
384 | }, | 380 | }, |
385 | }, | 381 | }, |
386 | Annotation { | 382 | Annotation { |
387 | range: 0..12, | 383 | range: 7..11, |
388 | kind: HasReferences { | 384 | kind: HasReferences { |
389 | position: FilePosition { | 385 | position: FilePosition { |
390 | file_id: FileId( | 386 | file_id: FileId( |
@@ -440,7 +436,7 @@ fn main() { | |||
440 | expect![[r#" | 436 | expect![[r#" |
441 | [ | 437 | [ |
442 | Annotation { | 438 | Annotation { |
443 | range: 66..100, | 439 | range: 69..73, |
444 | kind: Runnable { | 440 | kind: Runnable { |
445 | debug: false, | 441 | debug: false, |
446 | runnable: Runnable { | 442 | runnable: Runnable { |
@@ -459,7 +455,7 @@ fn main() { | |||
459 | }, | 455 | }, |
460 | }, | 456 | }, |
461 | Annotation { | 457 | Annotation { |
462 | range: 66..100, | 458 | range: 69..73, |
463 | kind: Runnable { | 459 | kind: Runnable { |
464 | debug: true, | 460 | debug: true, |
465 | runnable: Runnable { | 461 | runnable: Runnable { |
@@ -478,7 +474,7 @@ fn main() { | |||
478 | }, | 474 | }, |
479 | }, | 475 | }, |
480 | Annotation { | 476 | Annotation { |
481 | range: 0..12, | 477 | range: 7..11, |
482 | kind: HasImpls { | 478 | kind: HasImpls { |
483 | position: FilePosition { | 479 | position: FilePosition { |
484 | file_id: FileId( | 480 | file_id: FileId( |
@@ -502,7 +498,7 @@ fn main() { | |||
502 | }, | 498 | }, |
503 | }, | 499 | }, |
504 | Annotation { | 500 | Annotation { |
505 | range: 0..12, | 501 | range: 7..11, |
506 | kind: HasReferences { | 502 | kind: HasReferences { |
507 | position: FilePosition { | 503 | position: FilePosition { |
508 | file_id: FileId( | 504 | file_id: FileId( |
@@ -529,7 +525,7 @@ fn main() { | |||
529 | }, | 525 | }, |
530 | }, | 526 | }, |
531 | Annotation { | 527 | Annotation { |
532 | range: 14..34, | 528 | range: 20..31, |
533 | kind: HasImpls { | 529 | kind: HasImpls { |
534 | position: FilePosition { | 530 | position: FilePosition { |
535 | file_id: FileId( | 531 | file_id: FileId( |
@@ -553,7 +549,7 @@ fn main() { | |||
553 | }, | 549 | }, |
554 | }, | 550 | }, |
555 | Annotation { | 551 | Annotation { |
556 | range: 14..34, | 552 | range: 20..31, |
557 | kind: HasReferences { | 553 | kind: HasReferences { |
558 | position: FilePosition { | 554 | position: FilePosition { |
559 | file_id: FileId( | 555 | file_id: FileId( |
@@ -601,7 +597,7 @@ fn main() {} | |||
601 | expect![[r#" | 597 | expect![[r#" |
602 | [ | 598 | [ |
603 | Annotation { | 599 | Annotation { |
604 | range: 0..12, | 600 | range: 3..7, |
605 | kind: Runnable { | 601 | kind: Runnable { |
606 | debug: false, | 602 | debug: false, |
607 | runnable: Runnable { | 603 | runnable: Runnable { |
@@ -620,7 +616,7 @@ fn main() {} | |||
620 | }, | 616 | }, |
621 | }, | 617 | }, |
622 | Annotation { | 618 | Annotation { |
623 | range: 0..12, | 619 | range: 3..7, |
624 | kind: Runnable { | 620 | kind: Runnable { |
625 | debug: true, | 621 | debug: true, |
626 | runnable: Runnable { | 622 | runnable: Runnable { |
@@ -674,7 +670,7 @@ fn main() { | |||
674 | expect![[r#" | 670 | expect![[r#" |
675 | [ | 671 | [ |
676 | Annotation { | 672 | Annotation { |
677 | range: 58..95, | 673 | range: 61..65, |
678 | kind: Runnable { | 674 | kind: Runnable { |
679 | debug: false, | 675 | debug: false, |
680 | runnable: Runnable { | 676 | runnable: Runnable { |
@@ -693,7 +689,7 @@ fn main() { | |||
693 | }, | 689 | }, |
694 | }, | 690 | }, |
695 | Annotation { | 691 | Annotation { |
696 | range: 58..95, | 692 | range: 61..65, |
697 | kind: Runnable { | 693 | kind: Runnable { |
698 | debug: true, | 694 | debug: true, |
699 | runnable: Runnable { | 695 | runnable: Runnable { |
@@ -712,7 +708,7 @@ fn main() { | |||
712 | }, | 708 | }, |
713 | }, | 709 | }, |
714 | Annotation { | 710 | Annotation { |
715 | range: 0..12, | 711 | range: 7..11, |
716 | kind: HasImpls { | 712 | kind: HasImpls { |
717 | position: FilePosition { | 713 | position: FilePosition { |
718 | file_id: FileId( | 714 | file_id: FileId( |
@@ -736,7 +732,7 @@ fn main() { | |||
736 | }, | 732 | }, |
737 | }, | 733 | }, |
738 | Annotation { | 734 | Annotation { |
739 | range: 0..12, | 735 | range: 7..11, |
740 | kind: HasReferences { | 736 | kind: HasReferences { |
741 | position: FilePosition { | 737 | position: FilePosition { |
742 | file_id: FileId( | 738 | file_id: FileId( |
@@ -816,7 +812,7 @@ mod tests { | |||
816 | expect![[r#" | 812 | expect![[r#" |
817 | [ | 813 | [ |
818 | Annotation { | 814 | Annotation { |
819 | range: 0..12, | 815 | range: 3..7, |
820 | kind: Runnable { | 816 | kind: Runnable { |
821 | debug: false, | 817 | debug: false, |
822 | runnable: Runnable { | 818 | runnable: Runnable { |
@@ -835,7 +831,7 @@ mod tests { | |||
835 | }, | 831 | }, |
836 | }, | 832 | }, |
837 | Annotation { | 833 | Annotation { |
838 | range: 0..12, | 834 | range: 3..7, |
839 | kind: Runnable { | 835 | kind: Runnable { |
840 | debug: true, | 836 | debug: true, |
841 | runnable: Runnable { | 837 | runnable: Runnable { |
@@ -854,7 +850,7 @@ mod tests { | |||
854 | }, | 850 | }, |
855 | }, | 851 | }, |
856 | Annotation { | 852 | Annotation { |
857 | range: 14..64, | 853 | range: 18..23, |
858 | kind: Runnable { | 854 | kind: Runnable { |
859 | debug: false, | 855 | debug: false, |
860 | runnable: Runnable { | 856 | runnable: Runnable { |
@@ -875,7 +871,7 @@ mod tests { | |||
875 | }, | 871 | }, |
876 | }, | 872 | }, |
877 | Annotation { | 873 | Annotation { |
878 | range: 14..64, | 874 | range: 18..23, |
879 | kind: Runnable { | 875 | kind: Runnable { |
880 | debug: true, | 876 | debug: true, |
881 | runnable: Runnable { | 877 | runnable: Runnable { |
@@ -896,7 +892,7 @@ mod tests { | |||
896 | }, | 892 | }, |
897 | }, | 893 | }, |
898 | Annotation { | 894 | Annotation { |
899 | range: 30..62, | 895 | range: 45..57, |
900 | kind: Runnable { | 896 | kind: Runnable { |
901 | debug: false, | 897 | debug: false, |
902 | runnable: Runnable { | 898 | runnable: Runnable { |
@@ -922,7 +918,7 @@ mod tests { | |||
922 | }, | 918 | }, |
923 | }, | 919 | }, |
924 | Annotation { | 920 | Annotation { |
925 | range: 30..62, | 921 | range: 45..57, |
926 | kind: Runnable { | 922 | kind: Runnable { |
927 | debug: true, | 923 | debug: true, |
928 | runnable: Runnable { | 924 | runnable: Runnable { |
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs index 27d347dbd..6cf5810fa 100644 --- a/crates/ide/src/diagnostics.rs +++ b/crates/ide/src/diagnostics.rs | |||
@@ -299,10 +299,10 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist { | |||
299 | 299 | ||
300 | #[cfg(test)] | 300 | #[cfg(test)] |
301 | mod tests { | 301 | mod tests { |
302 | use expect_test::{expect, Expect}; | 302 | use expect_test::Expect; |
303 | use ide_assists::AssistResolveStrategy; | 303 | use ide_assists::AssistResolveStrategy; |
304 | use stdx::trim_indent; | 304 | use stdx::trim_indent; |
305 | use test_utils::assert_eq_text; | 305 | use test_utils::{assert_eq_text, extract_annotations}; |
306 | 306 | ||
307 | use crate::{fixture, DiagnosticsConfig}; | 307 | use crate::{fixture, DiagnosticsConfig}; |
308 | 308 | ||
@@ -311,6 +311,7 @@ mod tests { | |||
311 | /// * a diagnostic is produced | 311 | /// * a diagnostic is produced |
312 | /// * the first diagnostic fix trigger range touches the input cursor position | 312 | /// * the first diagnostic fix trigger range touches the input cursor position |
313 | /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied | 313 | /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied |
314 | #[track_caller] | ||
314 | pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) { | 315 | pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) { |
315 | check_nth_fix(0, ra_fixture_before, ra_fixture_after); | 316 | check_nth_fix(0, ra_fixture_before, ra_fixture_after); |
316 | } | 317 | } |
@@ -325,6 +326,7 @@ mod tests { | |||
325 | } | 326 | } |
326 | } | 327 | } |
327 | 328 | ||
329 | #[track_caller] | ||
328 | fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) { | 330 | fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) { |
329 | let after = trim_indent(ra_fixture_after); | 331 | let after = trim_indent(ra_fixture_after); |
330 | 332 | ||
@@ -396,26 +398,51 @@ mod tests { | |||
396 | expect.assert_debug_eq(&diagnostics) | 398 | expect.assert_debug_eq(&diagnostics) |
397 | } | 399 | } |
398 | 400 | ||
401 | pub(crate) fn check_diagnostics(ra_fixture: &str) { | ||
402 | let (analysis, file_id) = fixture::file(ra_fixture); | ||
403 | let diagnostics = analysis | ||
404 | .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id) | ||
405 | .unwrap(); | ||
406 | |||
407 | let expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); | ||
408 | let actual = diagnostics.into_iter().map(|d| (d.range, d.message)).collect::<Vec<_>>(); | ||
409 | assert_eq!(expected, actual); | ||
410 | } | ||
411 | |||
399 | #[test] | 412 | #[test] |
400 | fn test_unresolved_macro_range() { | 413 | fn test_unresolved_macro_range() { |
401 | check_expect( | 414 | check_diagnostics( |
402 | r#"foo::bar!(92);"#, | 415 | r#" |
403 | expect![[r#" | 416 | foo::bar!(92); |
404 | [ | 417 | //^^^ unresolved macro `foo::bar!` |
405 | Diagnostic { | 418 | "#, |
406 | message: "unresolved macro `foo::bar!`", | 419 | ); |
407 | range: 5..8, | 420 | } |
408 | severity: Error, | 421 | |
409 | fixes: None, | 422 | #[test] |
410 | unused: false, | 423 | fn unresolved_import_in_use_tree() { |
411 | code: Some( | 424 | // Only the relevant part of a nested `use` item should be highlighted. |
412 | DiagnosticCode( | 425 | check_diagnostics( |
413 | "unresolved-macro-call", | 426 | r#" |
414 | ), | 427 | use does_exist::{Exists, DoesntExist}; |
415 | ), | 428 | //^^^^^^^^^^^ unresolved import |
416 | }, | 429 | |
417 | ] | 430 | use {does_not_exist::*, does_exist}; |
418 | "#]], | 431 | //^^^^^^^^^^^^^^^^^ unresolved import |
432 | |||
433 | use does_not_exist::{ | ||
434 | a, | ||
435 | //^ unresolved import | ||
436 | b, | ||
437 | //^ unresolved import | ||
438 | c, | ||
439 | //^ unresolved import | ||
440 | }; | ||
441 | |||
442 | mod does_exist { | ||
443 | pub struct Exists; | ||
444 | } | ||
445 | "#, | ||
419 | ); | 446 | ); |
420 | } | 447 | } |
421 | 448 | ||
diff --git a/crates/ide/src/fixture.rs b/crates/ide/src/fixture.rs index cc6641ba1..6780af617 100644 --- a/crates/ide/src/fixture.rs +++ b/crates/ide/src/fixture.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! Utilities for creating `Analysis` instances for tests. | 1 | //! Utilities for creating `Analysis` instances for tests. |
2 | use ide_db::base_db::fixture::ChangeFixture; | 2 | use ide_db::base_db::fixture::ChangeFixture; |
3 | use syntax::{TextRange, TextSize}; | 3 | use syntax::{TextRange, TextSize}; |
4 | use test_utils::{extract_annotations, RangeOrOffset}; | 4 | use test_utils::extract_annotations; |
5 | 5 | ||
6 | use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; | 6 | use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; |
7 | 7 | ||
@@ -27,10 +27,7 @@ pub(crate) fn position(ra_fixture: &str) -> (Analysis, FilePosition) { | |||
27 | let change_fixture = ChangeFixture::parse(ra_fixture); | 27 | let change_fixture = ChangeFixture::parse(ra_fixture); |
28 | host.db.apply_change(change_fixture.change); | 28 | host.db.apply_change(change_fixture.change); |
29 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); | 29 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); |
30 | let offset = match range_or_offset { | 30 | let offset = range_or_offset.expect_offset(); |
31 | RangeOrOffset::Range(_) => panic!(), | ||
32 | RangeOrOffset::Offset(it) => it, | ||
33 | }; | ||
34 | (host.analysis(), FilePosition { file_id, offset }) | 31 | (host.analysis(), FilePosition { file_id, offset }) |
35 | } | 32 | } |
36 | 33 | ||
@@ -40,10 +37,7 @@ pub(crate) fn range(ra_fixture: &str) -> (Analysis, FileRange) { | |||
40 | let change_fixture = ChangeFixture::parse(ra_fixture); | 37 | let change_fixture = ChangeFixture::parse(ra_fixture); |
41 | host.db.apply_change(change_fixture.change); | 38 | host.db.apply_change(change_fixture.change); |
42 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); | 39 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); |
43 | let range = match range_or_offset { | 40 | let range = range_or_offset.expect_range(); |
44 | RangeOrOffset::Range(it) => it, | ||
45 | RangeOrOffset::Offset(_) => panic!(), | ||
46 | }; | ||
47 | (host.analysis(), FileRange { file_id, range }) | 41 | (host.analysis(), FileRange { file_id, range }) |
48 | } | 42 | } |
49 | 43 | ||
@@ -53,10 +47,7 @@ pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(Fil | |||
53 | let change_fixture = ChangeFixture::parse(ra_fixture); | 47 | let change_fixture = ChangeFixture::parse(ra_fixture); |
54 | host.db.apply_change(change_fixture.change); | 48 | host.db.apply_change(change_fixture.change); |
55 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); | 49 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); |
56 | let offset = match range_or_offset { | 50 | let offset = range_or_offset.expect_offset(); |
57 | RangeOrOffset::Range(_) => panic!(), | ||
58 | RangeOrOffset::Offset(it) => it, | ||
59 | }; | ||
60 | 51 | ||
61 | let annotations = change_fixture | 52 | let annotations = change_fixture |
62 | .files | 53 | .files |
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs index b893c1c54..c5015a345 100755 --- a/crates/ide/src/folding_ranges.rs +++ b/crates/ide/src/folding_ranges.rs | |||
@@ -19,6 +19,7 @@ pub enum FoldKind { | |||
19 | Statics, | 19 | Statics, |
20 | Array, | 20 | Array, |
21 | WhereClause, | 21 | WhereClause, |
22 | ReturnType, | ||
22 | } | 23 | } |
23 | 24 | ||
24 | #[derive(Debug)] | 25 | #[derive(Debug)] |
@@ -131,6 +132,7 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> { | |||
131 | COMMENT => Some(FoldKind::Comment), | 132 | COMMENT => Some(FoldKind::Comment), |
132 | ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList), | 133 | ARG_LIST | PARAM_LIST => Some(FoldKind::ArgList), |
133 | ARRAY_EXPR => Some(FoldKind::Array), | 134 | ARRAY_EXPR => Some(FoldKind::Array), |
135 | RET_TYPE => Some(FoldKind::ReturnType), | ||
134 | ASSOC_ITEM_LIST | 136 | ASSOC_ITEM_LIST |
135 | | RECORD_FIELD_LIST | 137 | | RECORD_FIELD_LIST |
136 | | RECORD_PAT_FIELD_LIST | 138 | | RECORD_PAT_FIELD_LIST |
@@ -300,6 +302,7 @@ mod tests { | |||
300 | FoldKind::Statics => "statics", | 302 | FoldKind::Statics => "statics", |
301 | FoldKind::Array => "array", | 303 | FoldKind::Array => "array", |
302 | FoldKind::WhereClause => "whereclause", | 304 | FoldKind::WhereClause => "whereclause", |
305 | FoldKind::ReturnType => "returntype", | ||
303 | }; | 306 | }; |
304 | assert_eq!(kind, &attr.unwrap()); | 307 | assert_eq!(kind, &attr.unwrap()); |
305 | } | 308 | } |
@@ -560,4 +563,18 @@ where | |||
560 | "#, | 563 | "#, |
561 | ) | 564 | ) |
562 | } | 565 | } |
566 | |||
567 | #[test] | ||
568 | fn fold_return_type() { | ||
569 | check( | ||
570 | r#" | ||
571 | fn foo()<fold returntype>-> ( | ||
572 | bool, | ||
573 | bool, | ||
574 | )</fold> { (true, true) } | ||
575 | |||
576 | fn bar() -> (bool, bool) { (true, true) } | ||
577 | "#, | ||
578 | ) | ||
579 | } | ||
563 | } | 580 | } |
diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 05130a237..43356a94e 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use hir::{Impl, Semantics}; | 1 | use hir::{AsAssocItem, Impl, Semantics}; |
2 | use ide_db::{ | 2 | use ide_db::{ |
3 | defs::{Definition, NameClass, NameRefClass}, | 3 | defs::{Definition, NameClass, NameRefClass}, |
4 | RootDatabase, | 4 | RootDatabase, |
@@ -36,6 +36,7 @@ pub(crate) fn goto_implementation( | |||
36 | } | 36 | } |
37 | ast::NameLike::Lifetime(_) => None, | 37 | ast::NameLike::Lifetime(_) => None, |
38 | }?; | 38 | }?; |
39 | |||
39 | let def = match def { | 40 | let def = match def { |
40 | Definition::ModuleDef(def) => def, | 41 | Definition::ModuleDef(def) => def, |
41 | _ => return None, | 42 | _ => return None, |
@@ -48,6 +49,18 @@ pub(crate) fn goto_implementation( | |||
48 | let module = sema.to_module_def(position.file_id)?; | 49 | let module = sema.to_module_def(position.file_id)?; |
49 | impls_for_ty(&sema, builtin.ty(sema.db, module)) | 50 | impls_for_ty(&sema, builtin.ty(sema.db, module)) |
50 | } | 51 | } |
52 | hir::ModuleDef::Function(f) => { | ||
53 | let assoc = f.as_assoc_item(sema.db)?; | ||
54 | let name = assoc.name(sema.db)?; | ||
55 | let trait_ = assoc.containing_trait(sema.db)?; | ||
56 | impls_for_trait_item(&sema, trait_, name) | ||
57 | } | ||
58 | hir::ModuleDef::Const(c) => { | ||
59 | let assoc = c.as_assoc_item(sema.db)?; | ||
60 | let name = assoc.name(sema.db)?; | ||
61 | let trait_ = assoc.containing_trait(sema.db)?; | ||
62 | impls_for_trait_item(&sema, trait_, name) | ||
63 | } | ||
51 | _ => return None, | 64 | _ => return None, |
52 | }; | 65 | }; |
53 | Some(RangeInfo { range: node.syntax().text_range(), info: navs }) | 66 | Some(RangeInfo { range: node.syntax().text_range(), info: navs }) |
@@ -64,6 +77,23 @@ fn impls_for_trait(sema: &Semantics<RootDatabase>, trait_: hir::Trait) -> Vec<Na | |||
64 | .collect() | 77 | .collect() |
65 | } | 78 | } |
66 | 79 | ||
80 | fn impls_for_trait_item( | ||
81 | sema: &Semantics<RootDatabase>, | ||
82 | trait_: hir::Trait, | ||
83 | fun_name: hir::Name, | ||
84 | ) -> Vec<NavigationTarget> { | ||
85 | Impl::all_for_trait(sema.db, trait_) | ||
86 | .into_iter() | ||
87 | .filter_map(|imp| { | ||
88 | let item = imp.items(sema.db).iter().find_map(|itm| { | ||
89 | let itm_name = itm.name(sema.db)?; | ||
90 | (itm_name == fun_name).then(|| itm.clone()) | ||
91 | })?; | ||
92 | item.try_to_nav(sema.db) | ||
93 | }) | ||
94 | .collect() | ||
95 | } | ||
96 | |||
67 | #[cfg(test)] | 97 | #[cfg(test)] |
68 | mod tests { | 98 | mod tests { |
69 | use ide_db::base_db::FileRange; | 99 | use ide_db::base_db::FileRange; |
@@ -262,4 +292,42 @@ impl bool {} | |||
262 | "#, | 292 | "#, |
263 | ); | 293 | ); |
264 | } | 294 | } |
295 | |||
296 | #[test] | ||
297 | fn goto_implementation_trait_functions() { | ||
298 | check( | ||
299 | r#" | ||
300 | trait Tr { | ||
301 | fn f$0(); | ||
302 | } | ||
303 | |||
304 | struct S; | ||
305 | |||
306 | impl Tr for S { | ||
307 | fn f() { | ||
308 | //^ | ||
309 | println!("Hello, world!"); | ||
310 | } | ||
311 | } | ||
312 | "#, | ||
313 | ); | ||
314 | } | ||
315 | |||
316 | #[test] | ||
317 | fn goto_implementation_trait_assoc_const() { | ||
318 | check( | ||
319 | r#" | ||
320 | trait Tr { | ||
321 | const C$0: usize; | ||
322 | } | ||
323 | |||
324 | struct S; | ||
325 | |||
326 | impl Tr for S { | ||
327 | const C: usize = 4; | ||
328 | //^ | ||
329 | } | ||
330 | "#, | ||
331 | ); | ||
332 | } | ||
265 | } | 333 | } |
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index f3284bb96..004d9cb68 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs | |||
@@ -1,3 +1,4 @@ | |||
1 | use ide_db::base_db::Upcast; | ||
1 | use ide_db::RootDatabase; | 2 | use ide_db::RootDatabase; |
2 | use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; | 3 | use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset, T}; |
3 | 4 | ||
@@ -31,6 +32,7 @@ pub(crate) fn goto_type_definition( | |||
31 | ast::Pat(it) => sema.type_of_pat(&it)?, | 32 | ast::Pat(it) => sema.type_of_pat(&it)?, |
32 | ast::SelfParam(it) => sema.type_of_self(&it)?, | 33 | ast::SelfParam(it) => sema.type_of_self(&it)?, |
33 | ast::Type(it) => sema.resolve_type(&it)?, | 34 | ast::Type(it) => sema.resolve_type(&it)?, |
35 | ast::RecordField(it) => sema.to_def(&it).map(|d| d.ty(db.upcast()))?, | ||
34 | _ => return None, | 36 | _ => return None, |
35 | } | 37 | } |
36 | }; | 38 | }; |
@@ -161,4 +163,34 @@ impl Foo$0 {} | |||
161 | "#, | 163 | "#, |
162 | ) | 164 | ) |
163 | } | 165 | } |
166 | |||
167 | #[test] | ||
168 | fn goto_def_for_struct_field() { | ||
169 | check( | ||
170 | r#" | ||
171 | struct Bar; | ||
172 | //^^^ | ||
173 | |||
174 | struct Foo { | ||
175 | bar$0: Bar, | ||
176 | } | ||
177 | "#, | ||
178 | ); | ||
179 | } | ||
180 | |||
181 | #[test] | ||
182 | fn goto_def_for_enum_struct_field() { | ||
183 | check( | ||
184 | r#" | ||
185 | struct Bar; | ||
186 | //^^^ | ||
187 | |||
188 | enum Foo { | ||
189 | Bar { | ||
190 | bar$0: Bar | ||
191 | }, | ||
192 | } | ||
193 | "#, | ||
194 | ); | ||
195 | } | ||
164 | } | 196 | } |
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 8f490e922..85f887737 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs | |||
@@ -219,7 +219,7 @@ fn hint_iterator( | |||
219 | ) -> Option<SmolStr> { | 219 | ) -> Option<SmolStr> { |
220 | let db = sema.db; | 220 | let db = sema.db; |
221 | let strukt = ty.strip_references().as_adt()?; | 221 | let strukt = ty.strip_references().as_adt()?; |
222 | let krate = strukt.krate(db); | 222 | let krate = strukt.module(db).krate(); |
223 | if krate != famous_defs.core()? { | 223 | if krate != famous_defs.core()? { |
224 | return None; | 224 | return None; |
225 | } | 225 | } |
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index f76715d84..ce1c76f37 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs | |||
@@ -227,7 +227,7 @@ pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> | |||
227 | let func = def.source(sema.db)?; | 227 | let func = def.source(sema.db)?; |
228 | let name_string = def.name(sema.db).to_string(); | 228 | let name_string = def.name(sema.db).to_string(); |
229 | 229 | ||
230 | let root = def.krate(sema.db)?.root_module(sema.db); | 230 | let root = def.module(sema.db).krate().root_module(sema.db); |
231 | 231 | ||
232 | let kind = if name_string == "main" && def.module(sema.db) == root { | 232 | let kind = if name_string == "main" && def.module(sema.db) == root { |
233 | RunnableKind::Bin | 233 | RunnableKind::Bin |
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 9df8d21af..79c2f4a1e 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs | |||
@@ -42,13 +42,107 @@ pub struct HlRange { | |||
42 | // Feature: Semantic Syntax Highlighting | 42 | // Feature: Semantic Syntax Highlighting |
43 | // | 43 | // |
44 | // rust-analyzer highlights the code semantically. | 44 | // rust-analyzer highlights the code semantically. |
45 | // For example, `bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait. | 45 | // For example, `Bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait. |
46 | // rust-analyzer does not specify colors directly, instead it assigns tag (like `struct`) and a set of modifiers (like `declaration`) to each token. | 46 | // rust-analyzer does not specify colors directly, instead it assigns a tag (like `struct`) and a set of modifiers (like `declaration`) to each token. |
47 | // It's up to the client to map those to specific colors. | 47 | // It's up to the client to map those to specific colors. |
48 | // | 48 | // |
49 | // The general rule is that a reference to an entity gets colored the same way as the entity itself. | 49 | // The general rule is that a reference to an entity gets colored the same way as the entity itself. |
50 | // We also give special modifier for `mut` and `&mut` local variables. | 50 | // We also give special modifier for `mut` and `&mut` local variables. |
51 | // | 51 | // |
52 | // | ||
53 | // .Token Tags | ||
54 | // | ||
55 | // Rust-analyzer currently emits the following token tags: | ||
56 | // | ||
57 | // - For items: | ||
58 | // + | ||
59 | // [horizontal] | ||
60 | // enum:: Emitted for enums. | ||
61 | // function:: Emitted for free-standing functions. | ||
62 | // macro:: Emitted for macros. | ||
63 | // method:: Emitted for associated functions, also known as methods. | ||
64 | // namespace:: Emitted for modules. | ||
65 | // struct:: Emitted for structs. | ||
66 | // trait:: Emitted for traits. | ||
67 | // typeAlias:: Emitted for type aliases and `Self` in `impl`s. | ||
68 | // union:: Emitted for unions. | ||
69 | // | ||
70 | // - For literals: | ||
71 | // + | ||
72 | // [horizontal] | ||
73 | // boolean:: Emitted for the boolean literals `true` and `false`. | ||
74 | // character:: Emitted for character literals. | ||
75 | // number:: Emitted for numeric literals. | ||
76 | // string:: Emitted for string literals. | ||
77 | // escapeSequence:: Emitted for escaped sequences inside strings like `\n`. | ||
78 | // formatSpecifier:: Emitted for format specifiers `{:?}` in `format!`-like macros. | ||
79 | // | ||
80 | // - For operators: | ||
81 | // + | ||
82 | // [horizontal] | ||
83 | // operator:: Emitted for general operators. | ||
84 | // arithmetic:: Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`. | ||
85 | // bitwise:: Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`. | ||
86 | // comparison:: Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`. | ||
87 | // logical:: Emitted for the logical operators `||`, `&&`, `!`. | ||
88 | // | ||
89 | // - For punctuation: | ||
90 | // + | ||
91 | // [horizontal] | ||
92 | // punctuation:: Emitted for general punctuation. | ||
93 | // angle:: Emitted for `<>` angle brackets. | ||
94 | // brace:: Emitted for `{}` braces. | ||
95 | // bracket:: Emitted for `[]` brackets. | ||
96 | // parenthesis:: Emitted for `()` parentheses. | ||
97 | // colon:: Emitted for the `:` token. | ||
98 | // comma:: Emitted for the `,` token. | ||
99 | // dot:: Emitted for the `.` token. | ||
100 | // Semi:: Emitted for the `;` token. | ||
101 | // | ||
102 | // //- | ||
103 | // | ||
104 | // [horizontal] | ||
105 | // attribute:: Emitted for attributes. | ||
106 | // builtinType:: Emitted for builtin types like `u32`, `str` and `f32`. | ||
107 | // comment:: Emitted for comments. | ||
108 | // constParameter:: Emitted for const parameters. | ||
109 | // enumMember:: Emitted for enum variants. | ||
110 | // generic:: Emitted for generic tokens that have no mapping. | ||
111 | // keyword:: Emitted for keywords. | ||
112 | // label:: Emitted for labels. | ||
113 | // lifetime:: Emitted for lifetimes. | ||
114 | // parameter:: Emitted for non-self function parameters. | ||
115 | // property:: Emitted for struct and union fields. | ||
116 | // selfKeyword:: Emitted for the self function parameter and self path-specifier. | ||
117 | // typeParameter:: Emitted for type parameters. | ||
118 | // unresolvedReference:: Emitted for unresolved references, names that rust-analyzer can't find the definition of. | ||
119 | // variable:: Emitted for locals, constants and statics. | ||
120 | // | ||
121 | // | ||
122 | // .Token Modifiers | ||
123 | // | ||
124 | // Token modifiers allow styling some elements in the source code more precisely. | ||
125 | // | ||
126 | // Rust-analyzer currently emits the following token modifiers: | ||
127 | // | ||
128 | // [horizontal] | ||
129 | // async:: Emitted for async functions and the `async` and `await` keywords. | ||
130 | // attribute:: Emitted for tokens inside attributes. | ||
131 | // callable:: Emitted for locals whose types implements one of the `Fn*` traits. | ||
132 | // constant:: Emitted for consts. | ||
133 | // consuming:: Emitted for locals that are being consumed when used in a function call. | ||
134 | // controlFlow:: Emitted for control-flow related tokens, this includes the `?` operator. | ||
135 | // declaration:: Emitted for names of definitions, like `foo` in `fn foo() {}`. | ||
136 | // documentation:: Emitted for documentation comments. | ||
137 | // injected:: Emitted for doc-string injected highlighting like rust source blocks in documentation. | ||
138 | // intraDocLink:: Emitted for intra doc links in doc-strings. | ||
139 | // library:: Emitted for items that are defined outside of the current crate. | ||
140 | // mutable:: Emitted for mutable locals and statics. | ||
141 | // static:: Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts. | ||
142 | // trait:: Emitted for associated trait items. | ||
143 | // unsafe:: Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token. | ||
144 | // | ||
145 | // | ||
52 | // image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[] | 146 | // image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[] |
53 | // image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[] | 147 | // image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[] |
54 | pub(crate) fn highlight( | 148 | pub(crate) fn highlight( |
@@ -80,6 +174,7 @@ pub(crate) fn highlight( | |||
80 | &mut hl, | 174 | &mut hl, |
81 | &sema, | 175 | &sema, |
82 | InFile::new(file_id.into(), &root), | 176 | InFile::new(file_id.into(), &root), |
177 | sema.scope(&root).krate(), | ||
83 | range_to_highlight, | 178 | range_to_highlight, |
84 | syntactic_name_ref_highlighting, | 179 | syntactic_name_ref_highlighting, |
85 | ); | 180 | ); |
@@ -90,6 +185,7 @@ fn traverse( | |||
90 | hl: &mut Highlights, | 185 | hl: &mut Highlights, |
91 | sema: &Semantics<RootDatabase>, | 186 | sema: &Semantics<RootDatabase>, |
92 | root: InFile<&SyntaxNode>, | 187 | root: InFile<&SyntaxNode>, |
188 | krate: Option<hir::Crate>, | ||
93 | range_to_highlight: TextRange, | 189 | range_to_highlight: TextRange, |
94 | syntactic_name_ref_highlighting: bool, | 190 | syntactic_name_ref_highlighting: bool, |
95 | ) { | 191 | ) { |
@@ -209,6 +305,7 @@ fn traverse( | |||
209 | 305 | ||
210 | if let Some((mut highlight, binding_hash)) = highlight::element( | 306 | if let Some((mut highlight, binding_hash)) = highlight::element( |
211 | &sema, | 307 | &sema, |
308 | krate, | ||
212 | &mut bindings_shadow_count, | 309 | &mut bindings_shadow_count, |
213 | syntactic_name_ref_highlighting, | 310 | syntactic_name_ref_highlighting, |
214 | element_to_highlight.clone(), | 311 | element_to_highlight.clone(), |
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 058e37ff0..9503c936d 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs | |||
@@ -19,6 +19,7 @@ use crate::{ | |||
19 | 19 | ||
20 | pub(super) fn element( | 20 | pub(super) fn element( |
21 | sema: &Semantics<RootDatabase>, | 21 | sema: &Semantics<RootDatabase>, |
22 | krate: Option<hir::Crate>, | ||
22 | bindings_shadow_count: &mut FxHashMap<hir::Name, u32>, | 23 | bindings_shadow_count: &mut FxHashMap<hir::Name, u32>, |
23 | syntactic_name_ref_highlighting: bool, | 24 | syntactic_name_ref_highlighting: bool, |
24 | element: SyntaxElement, | 25 | element: SyntaxElement, |
@@ -46,8 +47,10 @@ pub(super) fn element( | |||
46 | 47 | ||
47 | match name_kind { | 48 | match name_kind { |
48 | Some(NameClass::ExternCrate(_)) => SymbolKind::Module.into(), | 49 | Some(NameClass::ExternCrate(_)) => SymbolKind::Module.into(), |
49 | Some(NameClass::Definition(def)) => highlight_def(db, def) | HlMod::Definition, | 50 | Some(NameClass::Definition(def)) => { |
50 | Some(NameClass::ConstReference(def)) => highlight_def(db, def), | 51 | highlight_def(db, krate, def) | HlMod::Definition |
52 | } | ||
53 | Some(NameClass::ConstReference(def)) => highlight_def(db, krate, def), | ||
51 | Some(NameClass::PatFieldShorthand { field_ref, .. }) => { | 54 | Some(NameClass::PatFieldShorthand { field_ref, .. }) => { |
52 | let mut h = HlTag::Symbol(SymbolKind::Field).into(); | 55 | let mut h = HlTag::Symbol(SymbolKind::Field).into(); |
53 | if let Definition::Field(field) = field_ref { | 56 | if let Definition::Field(field) = field_ref { |
@@ -68,7 +71,7 @@ pub(super) fn element( | |||
68 | } | 71 | } |
69 | NAME_REF => { | 72 | NAME_REF => { |
70 | let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); | 73 | let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); |
71 | highlight_func_by_name_ref(sema, &name_ref).unwrap_or_else(|| { | 74 | highlight_func_by_name_ref(sema, krate, &name_ref).unwrap_or_else(|| { |
72 | let is_self = name_ref.self_token().is_some(); | 75 | let is_self = name_ref.self_token().is_some(); |
73 | let h = match NameRefClass::classify(sema, &name_ref) { | 76 | let h = match NameRefClass::classify(sema, &name_ref) { |
74 | Some(name_kind) => match name_kind { | 77 | Some(name_kind) => match name_kind { |
@@ -82,7 +85,7 @@ pub(super) fn element( | |||
82 | } | 85 | } |
83 | }; | 86 | }; |
84 | 87 | ||
85 | let mut h = highlight_def(db, def); | 88 | let mut h = highlight_def(db, krate, def); |
86 | 89 | ||
87 | if let Definition::Local(local) = &def { | 90 | if let Definition::Local(local) = &def { |
88 | if is_consumed_lvalue(name_ref.syntax().clone().into(), local, db) { | 91 | if is_consumed_lvalue(name_ref.syntax().clone().into(), local, db) { |
@@ -105,7 +108,7 @@ pub(super) fn element( | |||
105 | NameRefClass::FieldShorthand { .. } => SymbolKind::Field.into(), | 108 | NameRefClass::FieldShorthand { .. } => SymbolKind::Field.into(), |
106 | }, | 109 | }, |
107 | None if syntactic_name_ref_highlighting => { | 110 | None if syntactic_name_ref_highlighting => { |
108 | highlight_name_ref_by_syntax(name_ref, sema) | 111 | highlight_name_ref_by_syntax(name_ref, sema, krate) |
109 | } | 112 | } |
110 | None => HlTag::UnresolvedReference.into(), | 113 | None => HlTag::UnresolvedReference.into(), |
111 | }; | 114 | }; |
@@ -136,9 +139,11 @@ pub(super) fn element( | |||
136 | let lifetime = element.into_node().and_then(ast::Lifetime::cast).unwrap(); | 139 | let lifetime = element.into_node().and_then(ast::Lifetime::cast).unwrap(); |
137 | 140 | ||
138 | match NameClass::classify_lifetime(sema, &lifetime) { | 141 | match NameClass::classify_lifetime(sema, &lifetime) { |
139 | Some(NameClass::Definition(def)) => highlight_def(db, def) | HlMod::Definition, | 142 | Some(NameClass::Definition(def)) => { |
143 | highlight_def(db, krate, def) | HlMod::Definition | ||
144 | } | ||
140 | None => match NameRefClass::classify_lifetime(sema, &lifetime) { | 145 | None => match NameRefClass::classify_lifetime(sema, &lifetime) { |
141 | Some(NameRefClass::Definition(def)) => highlight_def(db, def), | 146 | Some(NameRefClass::Definition(def)) => highlight_def(db, krate, def), |
142 | _ => SymbolKind::LifetimeParam.into(), | 147 | _ => SymbolKind::LifetimeParam.into(), |
143 | }, | 148 | }, |
144 | _ => Highlight::from(SymbolKind::LifetimeParam) | HlMod::Definition, | 149 | _ => Highlight::from(SymbolKind::LifetimeParam) | HlMod::Definition, |
@@ -277,12 +282,12 @@ pub(super) fn element( | |||
277 | hash((name, shadow_count)) | 282 | hash((name, shadow_count)) |
278 | } | 283 | } |
279 | } | 284 | } |
280 | fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight { | 285 | fn highlight_def(db: &RootDatabase, krate: Option<hir::Crate>, def: Definition) -> Highlight { |
281 | match def { | 286 | let mut h = match def { |
282 | Definition::Macro(_) => HlTag::Symbol(SymbolKind::Macro), | 287 | Definition::Macro(_) => Highlight::new(HlTag::Symbol(SymbolKind::Macro)), |
283 | Definition::Field(_) => HlTag::Symbol(SymbolKind::Field), | 288 | Definition::Field(_) => Highlight::new(HlTag::Symbol(SymbolKind::Field)), |
284 | Definition::ModuleDef(def) => match def { | 289 | Definition::ModuleDef(def) => match def { |
285 | hir::ModuleDef::Module(_) => HlTag::Symbol(SymbolKind::Module), | 290 | hir::ModuleDef::Module(_) => Highlight::new(HlTag::Symbol(SymbolKind::Module)), |
286 | hir::ModuleDef::Function(func) => { | 291 | hir::ModuleDef::Function(func) => { |
287 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Function)); | 292 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Function)); |
288 | if let Some(item) = func.as_assoc_item(db) { | 293 | if let Some(item) = func.as_assoc_item(db) { |
@@ -314,14 +319,22 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight { | |||
314 | if func.is_async(db) { | 319 | if func.is_async(db) { |
315 | h |= HlMod::Async; | 320 | h |= HlMod::Async; |
316 | } | 321 | } |
317 | return h; | 322 | |
323 | h | ||
324 | } | ||
325 | hir::ModuleDef::Adt(adt) => { | ||
326 | let h = match adt { | ||
327 | hir::Adt::Struct(_) => HlTag::Symbol(SymbolKind::Struct), | ||
328 | hir::Adt::Enum(_) => HlTag::Symbol(SymbolKind::Enum), | ||
329 | hir::Adt::Union(_) => HlTag::Symbol(SymbolKind::Union), | ||
330 | }; | ||
331 | |||
332 | Highlight::new(h) | ||
318 | } | 333 | } |
319 | hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HlTag::Symbol(SymbolKind::Struct), | 334 | hir::ModuleDef::Variant(_) => Highlight::new(HlTag::Symbol(SymbolKind::Variant)), |
320 | hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HlTag::Symbol(SymbolKind::Enum), | ||
321 | hir::ModuleDef::Adt(hir::Adt::Union(_)) => HlTag::Symbol(SymbolKind::Union), | ||
322 | hir::ModuleDef::Variant(_) => HlTag::Symbol(SymbolKind::Variant), | ||
323 | hir::ModuleDef::Const(konst) => { | 335 | hir::ModuleDef::Const(konst) => { |
324 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const)); | 336 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const)); |
337 | |||
325 | if let Some(item) = konst.as_assoc_item(db) { | 338 | if let Some(item) = konst.as_assoc_item(db) { |
326 | h |= HlMod::Associated; | 339 | h |= HlMod::Associated; |
327 | match item.container(db) { | 340 | match item.container(db) { |
@@ -336,7 +349,7 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight { | |||
336 | } | 349 | } |
337 | } | 350 | } |
338 | 351 | ||
339 | return h; | 352 | h |
340 | } | 353 | } |
341 | hir::ModuleDef::Trait(trait_) => { | 354 | hir::ModuleDef::Trait(trait_) => { |
342 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Trait)); | 355 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Trait)); |
@@ -344,10 +357,12 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight { | |||
344 | if trait_.is_unsafe(db) { | 357 | if trait_.is_unsafe(db) { |
345 | h |= HlMod::Unsafe; | 358 | h |= HlMod::Unsafe; |
346 | } | 359 | } |
347 | return h; | 360 | |
361 | h | ||
348 | } | 362 | } |
349 | hir::ModuleDef::TypeAlias(type_) => { | 363 | hir::ModuleDef::TypeAlias(type_) => { |
350 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias)); | 364 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias)); |
365 | |||
351 | if let Some(item) = type_.as_assoc_item(db) { | 366 | if let Some(item) = type_.as_assoc_item(db) { |
352 | h |= HlMod::Associated; | 367 | h |= HlMod::Associated; |
353 | match item.container(db) { | 368 | match item.container(db) { |
@@ -361,23 +376,30 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight { | |||
361 | } | 376 | } |
362 | } | 377 | } |
363 | } | 378 | } |
364 | return h; | 379 | |
380 | h | ||
365 | } | 381 | } |
366 | hir::ModuleDef::BuiltinType(_) => HlTag::BuiltinType, | 382 | hir::ModuleDef::BuiltinType(_) => Highlight::new(HlTag::BuiltinType), |
367 | hir::ModuleDef::Static(s) => { | 383 | hir::ModuleDef::Static(s) => { |
368 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Static)); | 384 | let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Static)); |
385 | |||
369 | if s.is_mut(db) { | 386 | if s.is_mut(db) { |
370 | h |= HlMod::Mutable; | 387 | h |= HlMod::Mutable; |
371 | h |= HlMod::Unsafe; | 388 | h |= HlMod::Unsafe; |
372 | } | 389 | } |
373 | return h; | 390 | |
391 | h | ||
374 | } | 392 | } |
375 | }, | 393 | }, |
376 | Definition::SelfType(_) => HlTag::Symbol(SymbolKind::Impl), | 394 | Definition::SelfType(_) => Highlight::new(HlTag::Symbol(SymbolKind::Impl)), |
377 | Definition::GenericParam(it) => match it { | 395 | Definition::GenericParam(it) => match it { |
378 | hir::GenericParam::TypeParam(_) => HlTag::Symbol(SymbolKind::TypeParam), | 396 | hir::GenericParam::TypeParam(_) => Highlight::new(HlTag::Symbol(SymbolKind::TypeParam)), |
379 | hir::GenericParam::ConstParam(_) => HlTag::Symbol(SymbolKind::ConstParam), | 397 | hir::GenericParam::ConstParam(_) => { |
380 | hir::GenericParam::LifetimeParam(_) => HlTag::Symbol(SymbolKind::LifetimeParam), | 398 | Highlight::new(HlTag::Symbol(SymbolKind::ConstParam)) |
399 | } | ||
400 | hir::GenericParam::LifetimeParam(_) => { | ||
401 | Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam)) | ||
402 | } | ||
381 | }, | 403 | }, |
382 | Definition::Local(local) => { | 404 | Definition::Local(local) => { |
383 | let tag = if local.is_self(db) { | 405 | let tag = if local.is_self(db) { |
@@ -395,28 +417,40 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight { | |||
395 | if ty.as_callable(db).is_some() || ty.impls_fnonce(db) { | 417 | if ty.as_callable(db).is_some() || ty.impls_fnonce(db) { |
396 | h |= HlMod::Callable; | 418 | h |= HlMod::Callable; |
397 | } | 419 | } |
398 | return h; | 420 | h |
399 | } | 421 | } |
400 | Definition::Label(_) => HlTag::Symbol(SymbolKind::Label), | 422 | Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)), |
423 | }; | ||
424 | |||
425 | let is_from_other_crate = def.module(db).map(hir::Module::krate) != krate; | ||
426 | let is_builtin_type = matches!(def, Definition::ModuleDef(hir::ModuleDef::BuiltinType(_))); | ||
427 | |||
428 | if is_from_other_crate && !is_builtin_type { | ||
429 | h |= HlMod::Library; | ||
401 | } | 430 | } |
402 | .into() | 431 | |
432 | h | ||
403 | } | 433 | } |
404 | 434 | ||
405 | fn highlight_func_by_name_ref( | 435 | fn highlight_func_by_name_ref( |
406 | sema: &Semantics<RootDatabase>, | 436 | sema: &Semantics<RootDatabase>, |
437 | krate: Option<hir::Crate>, | ||
407 | name_ref: &ast::NameRef, | 438 | name_ref: &ast::NameRef, |
408 | ) -> Option<Highlight> { | 439 | ) -> Option<Highlight> { |
409 | let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?; | 440 | let mc = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast)?; |
410 | highlight_method_call(sema, &mc) | 441 | highlight_method_call(sema, krate, &mc) |
411 | } | 442 | } |
412 | 443 | ||
413 | fn highlight_method_call( | 444 | fn highlight_method_call( |
414 | sema: &Semantics<RootDatabase>, | 445 | sema: &Semantics<RootDatabase>, |
446 | krate: Option<hir::Crate>, | ||
415 | method_call: &ast::MethodCallExpr, | 447 | method_call: &ast::MethodCallExpr, |
416 | ) -> Option<Highlight> { | 448 | ) -> Option<Highlight> { |
417 | let func = sema.resolve_method_call(&method_call)?; | 449 | let func = sema.resolve_method_call(&method_call)?; |
450 | |||
418 | let mut h = SymbolKind::Function.into(); | 451 | let mut h = SymbolKind::Function.into(); |
419 | h |= HlMod::Associated; | 452 | h |= HlMod::Associated; |
453 | |||
420 | if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) { | 454 | if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) { |
421 | h |= HlMod::Unsafe; | 455 | h |= HlMod::Unsafe; |
422 | } | 456 | } |
@@ -424,7 +458,10 @@ fn highlight_method_call( | |||
424 | h |= HlMod::Async; | 458 | h |= HlMod::Async; |
425 | } | 459 | } |
426 | if func.as_assoc_item(sema.db).and_then(|it| it.containing_trait(sema.db)).is_some() { | 460 | if func.as_assoc_item(sema.db).and_then(|it| it.containing_trait(sema.db)).is_some() { |
427 | h |= HlMod::Trait | 461 | h |= HlMod::Trait; |
462 | } | ||
463 | if Some(func.module(sema.db).krate()) != krate { | ||
464 | h |= HlMod::Library; | ||
428 | } | 465 | } |
429 | 466 | ||
430 | if let Some(self_param) = func.self_param(sema.db) { | 467 | if let Some(self_param) = func.self_param(sema.db) { |
@@ -473,7 +510,11 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight { | |||
473 | tag.into() | 510 | tag.into() |
474 | } | 511 | } |
475 | 512 | ||
476 | fn highlight_name_ref_by_syntax(name: ast::NameRef, sema: &Semantics<RootDatabase>) -> Highlight { | 513 | fn highlight_name_ref_by_syntax( |
514 | name: ast::NameRef, | ||
515 | sema: &Semantics<RootDatabase>, | ||
516 | krate: Option<hir::Crate>, | ||
517 | ) -> Highlight { | ||
477 | let default = HlTag::UnresolvedReference; | 518 | let default = HlTag::UnresolvedReference; |
478 | 519 | ||
479 | let parent = match name.syntax().parent() { | 520 | let parent = match name.syntax().parent() { |
@@ -484,7 +525,7 @@ fn highlight_name_ref_by_syntax(name: ast::NameRef, sema: &Semantics<RootDatabas | |||
484 | match parent.kind() { | 525 | match parent.kind() { |
485 | METHOD_CALL_EXPR => { | 526 | METHOD_CALL_EXPR => { |
486 | return ast::MethodCallExpr::cast(parent) | 527 | return ast::MethodCallExpr::cast(parent) |
487 | .and_then(|it| highlight_method_call(sema, &it)) | 528 | .and_then(|it| highlight_method_call(sema, krate, &it)) |
488 | .unwrap_or_else(|| SymbolKind::Function.into()); | 529 | .unwrap_or_else(|| SymbolKind::Function.into()); |
489 | } | 530 | } |
490 | FIELD_EXPR => { | 531 | FIELD_EXPR => { |
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs index 27473a2f9..9d481deae 100644 --- a/crates/ide/src/syntax_highlighting/tags.rs +++ b/crates/ide/src/syntax_highlighting/tags.rs | |||
@@ -37,6 +37,8 @@ pub enum HlTag { | |||
37 | None, | 37 | None, |
38 | } | 38 | } |
39 | 39 | ||
40 | // Don't forget to adjust the feature description in crates/ide/src/syntax_highlighting.rs. | ||
41 | // And make sure to use the lsp strings used when converting to the protocol in crates\rust-analyzer\src\semantic_tokens.rs, not the names of the variants here. | ||
40 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] | 42 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] |
41 | #[repr(u8)] | 43 | #[repr(u8)] |
42 | pub enum HlMod { | 44 | pub enum HlMod { |
@@ -67,6 +69,8 @@ pub enum HlMod { | |||
67 | Trait, | 69 | Trait, |
68 | /// Used with keywords like `async` and `await`. | 70 | /// Used with keywords like `async` and `await`. |
69 | Async, | 71 | Async, |
72 | /// Used for items from other crates. | ||
73 | Library, | ||
70 | // Keep this last! | 74 | // Keep this last! |
71 | /// Used for unsafe functions, unsafe traits, mutable statics, union accesses and unsafe operations. | 75 | /// Used for unsafe functions, unsafe traits, mutable statics, union accesses and unsafe operations. |
72 | Unsafe, | 76 | Unsafe, |
@@ -189,6 +193,7 @@ impl HlMod { | |||
189 | HlMod::Static, | 193 | HlMod::Static, |
190 | HlMod::Trait, | 194 | HlMod::Trait, |
191 | HlMod::Async, | 195 | HlMod::Async, |
196 | HlMod::Library, | ||
192 | HlMod::Unsafe, | 197 | HlMod::Unsafe, |
193 | ]; | 198 | ]; |
194 | 199 | ||
@@ -207,6 +212,7 @@ impl HlMod { | |||
207 | HlMod::Static => "static", | 212 | HlMod::Static => "static", |
208 | HlMod::Trait => "trait", | 213 | HlMod::Trait => "trait", |
209 | HlMod::Async => "async", | 214 | HlMod::Async => "async", |
215 | HlMod::Library => "library", | ||
210 | HlMod::Unsafe => "unsafe", | 216 | HlMod::Unsafe => "unsafe", |
211 | } | 217 | } |
212 | } | 218 | } |
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlighting.html index 878431b56..0264e39a3 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlighting.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlighting.html | |||
@@ -248,4 +248,20 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
248 | <span class="brace">}</span> | 248 | <span class="brace">}</span> |
249 | 249 | ||
250 | <span class="keyword unsafe">unsafe</span> <span class="keyword">trait</span> <span class="trait declaration unsafe">Dangerous</span> <span class="brace">{</span><span class="brace">}</span> | 250 | <span class="keyword unsafe">unsafe</span> <span class="keyword">trait</span> <span class="trait declaration unsafe">Dangerous</span> <span class="brace">{</span><span class="brace">}</span> |
251 | <span class="keyword">impl</span> <span class="trait unsafe">Dangerous</span> <span class="keyword">for</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre> \ No newline at end of file | 251 | <span class="keyword">impl</span> <span class="trait unsafe">Dangerous</span> <span class="keyword">for</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span> |
252 | |||
253 | <span class="keyword">fn</span> <span class="function declaration">use_foo_items</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span> | ||
254 | <span class="keyword">let</span> <span class="variable declaration">bob</span> <span class="operator">=</span> <span class="module library">foo</span><span class="operator">::</span><span class="struct library">Person</span> <span class="brace">{</span> | ||
255 | <span class="field library">name</span><span class="colon">:</span> <span class="string_literal">"Bob"</span><span class="comma">,</span> | ||
256 | <span class="field library">age</span><span class="colon">:</span> <span class="module library">foo</span><span class="operator">::</span><span class="module library">consts</span><span class="operator">::</span><span class="constant library">NUMBER</span><span class="comma">,</span> | ||
257 | <span class="brace">}</span><span class="semicolon">;</span> | ||
258 | |||
259 | <span class="keyword">let</span> <span class="variable declaration">control_flow</span> <span class="operator">=</span> <span class="module library">foo</span><span class="operator">::</span><span class="function library">identity</span><span class="parenthesis">(</span><span class="module library">foo</span><span class="operator">::</span><span class="enum library">ControlFlow</span><span class="operator">::</span><span class="enum_variant library">Continue</span><span class="parenthesis">)</span><span class="semicolon">;</span> | ||
260 | |||
261 | <span class="keyword control">if</span> <span class="variable">control_flow</span><span class="operator">.</span><span class="function associated consuming library">should_die</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span> | ||
262 | foo::<span class="macro">die!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span> | ||
263 | <span class="brace">}</span> | ||
264 | <span class="brace">}</span> | ||
265 | |||
266 | |||
267 | </code></pre> \ No newline at end of file | ||
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 9ce26e930..662b53481 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs | |||
@@ -10,6 +10,7 @@ use crate::{fixture, FileRange, HlTag, TextRange}; | |||
10 | fn test_highlighting() { | 10 | fn test_highlighting() { |
11 | check_highlighting( | 11 | check_highlighting( |
12 | r#" | 12 | r#" |
13 | //- /main.rs crate:main deps:foo | ||
13 | use inner::{self as inner_mod}; | 14 | use inner::{self as inner_mod}; |
14 | mod inner {} | 15 | mod inner {} |
15 | 16 | ||
@@ -222,6 +223,49 @@ async fn async_main() { | |||
222 | 223 | ||
223 | unsafe trait Dangerous {} | 224 | unsafe trait Dangerous {} |
224 | impl Dangerous for () {} | 225 | impl Dangerous for () {} |
226 | |||
227 | fn use_foo_items() { | ||
228 | let bob = foo::Person { | ||
229 | name: "Bob", | ||
230 | age: foo::consts::NUMBER, | ||
231 | }; | ||
232 | |||
233 | let control_flow = foo::identity(foo::ControlFlow::Continue); | ||
234 | |||
235 | if control_flow.should_die() { | ||
236 | foo::die!(); | ||
237 | } | ||
238 | } | ||
239 | |||
240 | |||
241 | //- /foo.rs crate:foo | ||
242 | pub struct Person { | ||
243 | pub name: &'static str, | ||
244 | pub age: u8, | ||
245 | } | ||
246 | |||
247 | pub enum ControlFlow { | ||
248 | Continue, | ||
249 | Die, | ||
250 | } | ||
251 | |||
252 | impl ControlFlow { | ||
253 | pub fn should_die(self) -> bool { | ||
254 | matches!(self, ControlFlow::Die) | ||
255 | } | ||
256 | } | ||
257 | |||
258 | pub fn identity<T>(x: T) -> T { x } | ||
259 | |||
260 | pub mod consts { | ||
261 | pub const NUMBER: i64 = 92; | ||
262 | } | ||
263 | |||
264 | macro_rules! die { | ||
265 | () => { | ||
266 | panic!(); | ||
267 | }; | ||
268 | } | ||
225 | "# | 269 | "# |
226 | .trim(), | 270 | .trim(), |
227 | expect_file!["./test_data/highlighting.html"], | 271 | expect_file!["./test_data/highlighting.html"], |
diff --git a/crates/ide_assists/src/handlers/expand_glob_import.rs b/crates/ide_assists/src/handlers/expand_glob_import.rs index 79cb08d69..6da880b52 100644 --- a/crates/ide_assists/src/handlers/expand_glob_import.rs +++ b/crates/ide_assists/src/handlers/expand_glob_import.rs | |||
@@ -55,7 +55,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Opti | |||
55 | let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?; | 55 | let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?; |
56 | let imported_defs = find_imported_defs(ctx, star)?; | 56 | let imported_defs = find_imported_defs(ctx, star)?; |
57 | 57 | ||
58 | let target = parent.clone().either(|n| n.syntax().clone(), |n| n.syntax().clone()); | 58 | let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone()); |
59 | acc.add( | 59 | acc.add( |
60 | AssistId("expand_glob_import", AssistKind::RefactorRewrite), | 60 | AssistId("expand_glob_import", AssistKind::RefactorRewrite), |
61 | "Expand glob import", | 61 | "Expand glob import", |
diff --git a/crates/ide_assists/src/handlers/fill_match_arms.rs b/crates/ide_assists/src/handlers/fill_match_arms.rs index 97435f021..3d2cd739a 100644 --- a/crates/ide_assists/src/handlers/fill_match_arms.rs +++ b/crates/ide_assists/src/handlers/fill_match_arms.rs | |||
@@ -31,8 +31,8 @@ use crate::{ | |||
31 | // | 31 | // |
32 | // fn handle(action: Action) { | 32 | // fn handle(action: Action) { |
33 | // match action { | 33 | // match action { |
34 | // $0Action::Move { distance } => {} | 34 | // $0Action::Move { distance } => todo!(), |
35 | // Action::Stop => {} | 35 | // Action::Stop => todo!(), |
36 | // } | 36 | // } |
37 | // } | 37 | // } |
38 | // ``` | 38 | // ``` |
@@ -129,7 +129,7 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option< | |||
129 | |builder| { | 129 | |builder| { |
130 | let new_match_arm_list = match_arm_list.clone_for_update(); | 130 | let new_match_arm_list = match_arm_list.clone_for_update(); |
131 | let missing_arms = missing_pats | 131 | let missing_arms = missing_pats |
132 | .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) | 132 | .map(|pat| make::match_arm(iter::once(pat), make::ext::expr_todo())) |
133 | .map(|it| it.clone_for_update()); | 133 | .map(|it| it.clone_for_update()); |
134 | 134 | ||
135 | let catch_all_arm = new_match_arm_list | 135 | let catch_all_arm = new_match_arm_list |
@@ -350,8 +350,8 @@ fn foo(a: bool) { | |||
350 | r#" | 350 | r#" |
351 | fn foo(a: bool) { | 351 | fn foo(a: bool) { |
352 | match a { | 352 | match a { |
353 | $0true => {} | 353 | $0true => todo!(), |
354 | false => {} | 354 | false => todo!(), |
355 | } | 355 | } |
356 | } | 356 | } |
357 | "#, | 357 | "#, |
@@ -373,7 +373,7 @@ fn foo(a: bool) { | |||
373 | fn foo(a: bool) { | 373 | fn foo(a: bool) { |
374 | match a { | 374 | match a { |
375 | true => {} | 375 | true => {} |
376 | $0false => {} | 376 | $0false => todo!(), |
377 | } | 377 | } |
378 | } | 378 | } |
379 | "#, | 379 | "#, |
@@ -410,10 +410,10 @@ fn foo(a: bool) { | |||
410 | r#" | 410 | r#" |
411 | fn foo(a: bool) { | 411 | fn foo(a: bool) { |
412 | match (a, a) { | 412 | match (a, a) { |
413 | $0(true, true) => {} | 413 | $0(true, true) => todo!(), |
414 | (true, false) => {} | 414 | (true, false) => todo!(), |
415 | (false, true) => {} | 415 | (false, true) => todo!(), |
416 | (false, false) => {} | 416 | (false, false) => todo!(), |
417 | } | 417 | } |
418 | } | 418 | } |
419 | "#, | 419 | "#, |
@@ -435,9 +435,9 @@ fn foo(a: bool) { | |||
435 | fn foo(a: bool) { | 435 | fn foo(a: bool) { |
436 | match (a, a) { | 436 | match (a, a) { |
437 | (false, true) => {} | 437 | (false, true) => {} |
438 | $0(true, true) => {} | 438 | $0(true, true) => todo!(), |
439 | (true, false) => {} | 439 | (true, false) => todo!(), |
440 | (false, false) => {} | 440 | (false, false) => todo!(), |
441 | } | 441 | } |
442 | } | 442 | } |
443 | "#, | 443 | "#, |
@@ -471,7 +471,7 @@ fn main() { | |||
471 | match A::As { | 471 | match A::As { |
472 | A::Bs { x, y: Some(_) } => {} | 472 | A::Bs { x, y: Some(_) } => {} |
473 | A::Cs(_, Some(_)) => {} | 473 | A::Cs(_, Some(_)) => {} |
474 | $0A::As => {} | 474 | $0A::As => todo!(), |
475 | } | 475 | } |
476 | } | 476 | } |
477 | "#, | 477 | "#, |
@@ -499,7 +499,7 @@ use Option::*; | |||
499 | fn main() { | 499 | fn main() { |
500 | match None { | 500 | match None { |
501 | None => {} | 501 | None => {} |
502 | Some(${0:_}) => {} | 502 | Some(${0:_}) => todo!(), |
503 | } | 503 | } |
504 | } | 504 | } |
505 | "#, | 505 | "#, |
@@ -523,7 +523,7 @@ enum A { As, Bs, Cs(Option<i32>) } | |||
523 | fn main() { | 523 | fn main() { |
524 | match A::As { | 524 | match A::As { |
525 | A::Cs(_) | A::Bs => {} | 525 | A::Cs(_) | A::Bs => {} |
526 | $0A::As => {} | 526 | $0A::As => todo!(), |
527 | } | 527 | } |
528 | } | 528 | } |
529 | "#, | 529 | "#, |
@@ -553,8 +553,8 @@ fn main() { | |||
553 | A::Bs if 0 < 1 => {} | 553 | A::Bs if 0 < 1 => {} |
554 | A::Ds(_value) => { let x = 1; } | 554 | A::Ds(_value) => { let x = 1; } |
555 | A::Es(B::Xs) => (), | 555 | A::Es(B::Xs) => (), |
556 | $0A::As => {} | 556 | $0A::As => todo!(), |
557 | A::Cs => {} | 557 | A::Cs => todo!(), |
558 | } | 558 | } |
559 | } | 559 | } |
560 | "#, | 560 | "#, |
@@ -580,7 +580,7 @@ fn main() { | |||
580 | match A::As { | 580 | match A::As { |
581 | A::As(_) => {} | 581 | A::As(_) => {} |
582 | a @ A::Bs(_) => {} | 582 | a @ A::Bs(_) => {} |
583 | A::Cs(${0:_}) => {} | 583 | A::Cs(${0:_}) => todo!(), |
584 | } | 584 | } |
585 | } | 585 | } |
586 | "#, | 586 | "#, |
@@ -605,11 +605,11 @@ enum A { As, Bs, Cs(String), Ds(String, String), Es { x: usize, y: usize } } | |||
605 | fn main() { | 605 | fn main() { |
606 | let a = A::As; | 606 | let a = A::As; |
607 | match a { | 607 | match a { |
608 | $0A::As => {} | 608 | $0A::As => todo!(), |
609 | A::Bs => {} | 609 | A::Bs => todo!(), |
610 | A::Cs(_) => {} | 610 | A::Cs(_) => todo!(), |
611 | A::Ds(_, _) => {} | 611 | A::Ds(_, _) => todo!(), |
612 | A::Es { x, y } => {} | 612 | A::Es { x, y } => todo!(), |
613 | } | 613 | } |
614 | } | 614 | } |
615 | "#, | 615 | "#, |
@@ -638,10 +638,10 @@ fn main() { | |||
638 | let a = A::One; | 638 | let a = A::One; |
639 | let b = B::One; | 639 | let b = B::One; |
640 | match (a, b) { | 640 | match (a, b) { |
641 | $0(A::One, B::One) => {} | 641 | $0(A::One, B::One) => todo!(), |
642 | (A::One, B::Two) => {} | 642 | (A::One, B::Two) => todo!(), |
643 | (A::Two, B::One) => {} | 643 | (A::Two, B::One) => todo!(), |
644 | (A::Two, B::Two) => {} | 644 | (A::Two, B::Two) => todo!(), |
645 | } | 645 | } |
646 | } | 646 | } |
647 | "#, | 647 | "#, |
@@ -670,10 +670,10 @@ fn main() { | |||
670 | let a = A::One; | 670 | let a = A::One; |
671 | let b = B::One; | 671 | let b = B::One; |
672 | match (&a, &b) { | 672 | match (&a, &b) { |
673 | $0(A::One, B::One) => {} | 673 | $0(A::One, B::One) => todo!(), |
674 | (A::One, B::Two) => {} | 674 | (A::One, B::Two) => todo!(), |
675 | (A::Two, B::One) => {} | 675 | (A::Two, B::One) => todo!(), |
676 | (A::Two, B::Two) => {} | 676 | (A::Two, B::Two) => todo!(), |
677 | } | 677 | } |
678 | } | 678 | } |
679 | "#, | 679 | "#, |
@@ -705,9 +705,9 @@ fn main() { | |||
705 | let b = B::One; | 705 | let b = B::One; |
706 | match (a, b) { | 706 | match (a, b) { |
707 | (A::Two, B::One) => {} | 707 | (A::Two, B::One) => {} |
708 | $0(A::One, B::One) => {} | 708 | $0(A::One, B::One) => todo!(), |
709 | (A::One, B::Two) => {} | 709 | (A::One, B::Two) => todo!(), |
710 | (A::Two, B::Two) => {} | 710 | (A::Two, B::Two) => todo!(), |
711 | } | 711 | } |
712 | } | 712 | } |
713 | "#, | 713 | "#, |
@@ -736,7 +736,7 @@ fn main() { | |||
736 | match (a, b) { | 736 | match (a, b) { |
737 | (Some(_), _) => {} | 737 | (Some(_), _) => {} |
738 | (None, Some(_)) => {} | 738 | (None, Some(_)) => {} |
739 | $0(None, None) => {} | 739 | $0(None, None) => todo!(), |
740 | } | 740 | } |
741 | } | 741 | } |
742 | "#, | 742 | "#, |
@@ -801,8 +801,8 @@ enum A { One, Two } | |||
801 | fn main() { | 801 | fn main() { |
802 | let a = A::One; | 802 | let a = A::One; |
803 | match (a, ) { | 803 | match (a, ) { |
804 | $0(A::One,) => {} | 804 | $0(A::One,) => todo!(), |
805 | (A::Two,) => {} | 805 | (A::Two,) => todo!(), |
806 | } | 806 | } |
807 | } | 807 | } |
808 | "#, | 808 | "#, |
@@ -826,7 +826,7 @@ enum A { As } | |||
826 | 826 | ||
827 | fn foo(a: &A) { | 827 | fn foo(a: &A) { |
828 | match a { | 828 | match a { |
829 | $0A::As => {} | 829 | $0A::As => todo!(), |
830 | } | 830 | } |
831 | } | 831 | } |
832 | "#, | 832 | "#, |
@@ -851,7 +851,7 @@ enum A { | |||
851 | 851 | ||
852 | fn foo(a: &mut A) { | 852 | fn foo(a: &mut A) { |
853 | match a { | 853 | match a { |
854 | $0A::Es { x, y } => {} | 854 | $0A::Es { x, y } => todo!(), |
855 | } | 855 | } |
856 | } | 856 | } |
857 | "#, | 857 | "#, |
@@ -891,8 +891,8 @@ enum E { X, Y } | |||
891 | 891 | ||
892 | fn main() { | 892 | fn main() { |
893 | match E::X { | 893 | match E::X { |
894 | $0E::X => {} | 894 | $0E::X => todo!(), |
895 | E::Y => {} | 895 | E::Y => todo!(), |
896 | } | 896 | } |
897 | } | 897 | } |
898 | "#, | 898 | "#, |
@@ -919,8 +919,8 @@ use foo::E::X; | |||
919 | 919 | ||
920 | fn main() { | 920 | fn main() { |
921 | match X { | 921 | match X { |
922 | $0X => {} | 922 | $0X => todo!(), |
923 | foo::E::Y => {} | 923 | foo::E::Y => todo!(), |
924 | } | 924 | } |
925 | } | 925 | } |
926 | "#, | 926 | "#, |
@@ -947,7 +947,7 @@ fn foo(a: A) { | |||
947 | match a { | 947 | match a { |
948 | // foo bar baz | 948 | // foo bar baz |
949 | A::One => {} | 949 | A::One => {} |
950 | $0A::Two => {} | 950 | $0A::Two => todo!(), |
951 | // This is where the rest should be | 951 | // This is where the rest should be |
952 | } | 952 | } |
953 | } | 953 | } |
@@ -971,8 +971,8 @@ fn foo(a: A) { | |||
971 | enum A { One, Two } | 971 | enum A { One, Two } |
972 | fn foo(a: A) { | 972 | fn foo(a: A) { |
973 | match a { | 973 | match a { |
974 | $0A::One => {} | 974 | $0A::One => todo!(), |
975 | A::Two => {} | 975 | A::Two => todo!(), |
976 | // foo bar baz | 976 | // foo bar baz |
977 | } | 977 | } |
978 | } | 978 | } |
@@ -996,8 +996,8 @@ fn foo(a: A) { | |||
996 | enum A { One, Two, } | 996 | enum A { One, Two, } |
997 | fn foo(a: A) { | 997 | fn foo(a: A) { |
998 | match a { | 998 | match a { |
999 | $0A::One => {} | 999 | $0A::One => todo!(), |
1000 | A::Two => {} | 1000 | A::Two => todo!(), |
1001 | } | 1001 | } |
1002 | } | 1002 | } |
1003 | "#, | 1003 | "#, |
@@ -1021,8 +1021,8 @@ fn foo(opt: Option<i32>) { | |||
1021 | r#" | 1021 | r#" |
1022 | fn foo(opt: Option<i32>) { | 1022 | fn foo(opt: Option<i32>) { |
1023 | match opt { | 1023 | match opt { |
1024 | Some(${0:_}) => {} | 1024 | Some(${0:_}) => todo!(), |
1025 | None => {} | 1025 | None => todo!(), |
1026 | } | 1026 | } |
1027 | } | 1027 | } |
1028 | "#, | 1028 | "#, |
@@ -1054,9 +1054,9 @@ enum Test { | |||
1054 | 1054 | ||
1055 | fn foo(t: Test) { | 1055 | fn foo(t: Test) { |
1056 | m!(match t { | 1056 | m!(match t { |
1057 | $0Test::A => {} | 1057 | $0Test::A => todo!(), |
1058 | Test::B => {} | 1058 | Test::B => todo!(), |
1059 | Test::C => {} | 1059 | Test::C => todo!(), |
1060 | }); | 1060 | }); |
1061 | }"#, | 1061 | }"#, |
1062 | ); | 1062 | ); |
@@ -1076,4 +1076,44 @@ fn foo(tuple: (A, A)) { | |||
1076 | "#, | 1076 | "#, |
1077 | ); | 1077 | ); |
1078 | } | 1078 | } |
1079 | |||
1080 | #[test] | ||
1081 | fn adds_comma_before_new_arms() { | ||
1082 | check_assist( | ||
1083 | fill_match_arms, | ||
1084 | r#" | ||
1085 | fn foo(t: bool) { | ||
1086 | match $0t { | ||
1087 | true => 1 + 2 | ||
1088 | } | ||
1089 | }"#, | ||
1090 | r#" | ||
1091 | fn foo(t: bool) { | ||
1092 | match t { | ||
1093 | true => 1 + 2, | ||
1094 | $0false => todo!(), | ||
1095 | } | ||
1096 | }"#, | ||
1097 | ); | ||
1098 | } | ||
1099 | |||
1100 | #[test] | ||
1101 | fn does_not_add_extra_comma() { | ||
1102 | check_assist( | ||
1103 | fill_match_arms, | ||
1104 | r#" | ||
1105 | fn foo(t: bool) { | ||
1106 | match $0t { | ||
1107 | true => 1 + 2, | ||
1108 | } | ||
1109 | }"#, | ||
1110 | r#" | ||
1111 | fn foo(t: bool) { | ||
1112 | match t { | ||
1113 | true => 1 + 2, | ||
1114 | $0false => todo!(), | ||
1115 | } | ||
1116 | }"#, | ||
1117 | ); | ||
1118 | } | ||
1079 | } | 1119 | } |
diff --git a/crates/ide_assists/src/tests/generated.rs b/crates/ide_assists/src/tests/generated.rs index 8a9b0777c..de5d9e55a 100644 --- a/crates/ide_assists/src/tests/generated.rs +++ b/crates/ide_assists/src/tests/generated.rs | |||
@@ -455,8 +455,8 @@ enum Action { Move { distance: u32 }, Stop } | |||
455 | 455 | ||
456 | fn handle(action: Action) { | 456 | fn handle(action: Action) { |
457 | match action { | 457 | match action { |
458 | $0Action::Move { distance } => {} | 458 | $0Action::Move { distance } => todo!(), |
459 | Action::Stop => {} | 459 | Action::Stop => todo!(), |
460 | } | 460 | } |
461 | } | 461 | } |
462 | "#####, | 462 | "#####, |
diff --git a/crates/ide_completion/Cargo.toml b/crates/ide_completion/Cargo.toml index 6bd8a5500..ba81c9e04 100644 --- a/crates/ide_completion/Cargo.toml +++ b/crates/ide_completion/Cargo.toml | |||
@@ -15,6 +15,7 @@ itertools = "0.10.0" | |||
15 | log = "0.4.8" | 15 | log = "0.4.8" |
16 | rustc-hash = "1.1.0" | 16 | rustc-hash = "1.1.0" |
17 | either = "1.6.1" | 17 | either = "1.6.1" |
18 | once_cell = "1.7" | ||
18 | 19 | ||
19 | stdx = { path = "../stdx", version = "0.0.0" } | 20 | stdx = { path = "../stdx", version = "0.0.0" } |
20 | syntax = { path = "../syntax", version = "0.0.0" } | 21 | syntax = { path = "../syntax", version = "0.0.0" } |
diff --git a/crates/ide_completion/src/completions/attribute.rs b/crates/ide_completion/src/completions/attribute.rs index b1505c74b..13d5b90c9 100644 --- a/crates/ide_completion/src/completions/attribute.rs +++ b/crates/ide_completion/src/completions/attribute.rs | |||
@@ -3,9 +3,9 @@ | |||
3 | //! This module uses a bit of static metadata to provide completions | 3 | //! This module uses a bit of static metadata to provide completions |
4 | //! for built-in attributes. | 4 | //! for built-in attributes. |
5 | 5 | ||
6 | use itertools::Itertools; | 6 | use once_cell::sync::Lazy; |
7 | use rustc_hash::FxHashSet; | 7 | use rustc_hash::{FxHashMap, FxHashSet}; |
8 | use syntax::{ast, AstNode, T}; | 8 | use syntax::{ast, AstNode, NodeOrToken, SyntaxKind, T}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | context::CompletionContext, | 11 | context::CompletionContext, |
@@ -14,33 +14,40 @@ use crate::{ | |||
14 | Completions, | 14 | Completions, |
15 | }; | 15 | }; |
16 | 16 | ||
17 | pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { | 17 | mod derive; |
18 | if ctx.mod_declaration_under_caret.is_some() { | 18 | mod lint; |
19 | return None; | 19 | pub(crate) use self::lint::LintCompletion; |
20 | } | ||
21 | 20 | ||
21 | pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> { | ||
22 | let attribute = ctx.attribute_under_caret.as_ref()?; | 22 | let attribute = ctx.attribute_under_caret.as_ref()?; |
23 | match (attribute.path(), attribute.token_tree()) { | 23 | match (attribute.path().and_then(|p| p.as_single_name_ref()), attribute.token_tree()) { |
24 | (Some(path), Some(token_tree)) => { | 24 | (Some(path), Some(token_tree)) => match path.text().as_str() { |
25 | let path = path.syntax().text(); | 25 | "derive" => derive::complete_derive(acc, ctx, token_tree), |
26 | if path == "derive" { | 26 | "feature" => lint::complete_lint(acc, ctx, token_tree, FEATURES), |
27 | complete_derive(acc, ctx, token_tree) | 27 | "allow" | "warn" | "deny" | "forbid" => { |
28 | } else if path == "feature" { | 28 | lint::complete_lint(acc, ctx, token_tree.clone(), lint::DEFAULT_LINT_COMPLETIONS); |
29 | complete_lint(acc, ctx, token_tree, FEATURES) | 29 | lint::complete_lint(acc, ctx, token_tree, CLIPPY_LINTS); |
30 | } else if path == "allow" || path == "warn" || path == "deny" || path == "forbid" { | ||
31 | complete_lint(acc, ctx, token_tree.clone(), DEFAULT_LINT_COMPLETIONS); | ||
32 | complete_lint(acc, ctx, token_tree, CLIPPY_LINTS); | ||
33 | } | 30 | } |
34 | } | 31 | _ => (), |
35 | (_, Some(_token_tree)) => {} | 32 | }, |
36 | _ => complete_attribute_start(acc, ctx, attribute), | 33 | (None, Some(_)) => (), |
34 | _ => complete_new_attribute(acc, ctx, attribute), | ||
37 | } | 35 | } |
38 | Some(()) | 36 | Some(()) |
39 | } | 37 | } |
40 | 38 | ||
41 | fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) { | 39 | fn complete_new_attribute(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) { |
40 | let attribute_annotated_item_kind = attribute.syntax().parent().map(|it| it.kind()); | ||
41 | let attributes = attribute_annotated_item_kind.and_then(|kind| { | ||
42 | if ast::Expr::can_cast(kind) { | ||
43 | Some(EXPR_ATTRIBUTES) | ||
44 | } else { | ||
45 | KIND_TO_ATTRIBUTES.get(&kind).copied() | ||
46 | } | ||
47 | }); | ||
42 | let is_inner = attribute.kind() == ast::AttrKind::Inner; | 48 | let is_inner = attribute.kind() == ast::AttrKind::Inner; |
43 | for attr_completion in ATTRIBUTES.iter().filter(|compl| is_inner || !compl.prefer_inner) { | 49 | |
50 | let add_completion = |attr_completion: &AttrCompletion| { | ||
44 | let mut item = CompletionItem::new( | 51 | let mut item = CompletionItem::new( |
45 | CompletionKind::Attribute, | 52 | CompletionKind::Attribute, |
46 | ctx.source_range(), | 53 | ctx.source_range(), |
@@ -56,9 +63,19 @@ fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attr | |||
56 | item.insert_snippet(cap, snippet); | 63 | item.insert_snippet(cap, snippet); |
57 | } | 64 | } |
58 | 65 | ||
59 | if attribute.kind() == ast::AttrKind::Inner || !attr_completion.prefer_inner { | 66 | if is_inner || !attr_completion.prefer_inner { |
60 | acc.add(item.build()); | 67 | acc.add(item.build()); |
61 | } | 68 | } |
69 | }; | ||
70 | |||
71 | match attributes { | ||
72 | Some(applicable) => applicable | ||
73 | .iter() | ||
74 | .flat_map(|name| ATTRIBUTES.binary_search_by(|attr| attr.key().cmp(name)).ok()) | ||
75 | .flat_map(|idx| ATTRIBUTES.get(idx)) | ||
76 | .for_each(add_completion), | ||
77 | None if is_inner => ATTRIBUTES.iter().for_each(add_completion), | ||
78 | None => ATTRIBUTES.iter().filter(|compl| !compl.prefer_inner).for_each(add_completion), | ||
62 | } | 79 | } |
63 | } | 80 | } |
64 | 81 | ||
@@ -70,6 +87,10 @@ struct AttrCompletion { | |||
70 | } | 87 | } |
71 | 88 | ||
72 | impl AttrCompletion { | 89 | impl AttrCompletion { |
90 | fn key(&self) -> &'static str { | ||
91 | self.lookup.unwrap_or(self.label) | ||
92 | } | ||
93 | |||
73 | const fn prefer_inner(self) -> AttrCompletion { | 94 | const fn prefer_inner(self) -> AttrCompletion { |
74 | AttrCompletion { prefer_inner: true, ..self } | 95 | AttrCompletion { prefer_inner: true, ..self } |
75 | } | 96 | } |
@@ -83,26 +104,119 @@ const fn attr( | |||
83 | AttrCompletion { label, lookup, snippet, prefer_inner: false } | 104 | AttrCompletion { label, lookup, snippet, prefer_inner: false } |
84 | } | 105 | } |
85 | 106 | ||
107 | macro_rules! attrs { | ||
108 | // attributes applicable to all items | ||
109 | [@ { item $($tt:tt)* } {$($acc:tt)*}] => { | ||
110 | attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle" }) | ||
111 | }; | ||
112 | // attributes applicable to all adts | ||
113 | [@ { adt $($tt:tt)* } {$($acc:tt)*}] => { | ||
114 | attrs!(@ { $($tt)* } { $($acc)*, "derive", "repr" }) | ||
115 | }; | ||
116 | // attributes applicable to all linkable things aka functions/statics | ||
117 | [@ { linkable $($tt:tt)* } {$($acc:tt)*}] => { | ||
118 | attrs!(@ { $($tt)* } { $($acc)*, "export_name", "link_name", "link_section" }) | ||
119 | }; | ||
120 | // error fallback for nicer error message | ||
121 | [@ { $ty:ident $($tt:tt)* } {$($acc:tt)*}] => { | ||
122 | compile_error!(concat!("unknown attr subtype ", stringify!($ty))) | ||
123 | }; | ||
124 | // general push down accumulation | ||
125 | [@ { $lit:literal $($tt:tt)*} {$($acc:tt)*}] => { | ||
126 | attrs!(@ { $($tt)* } { $($acc)*, $lit }) | ||
127 | }; | ||
128 | [@ {$($tt:tt)+} {$($tt2:tt)*}] => { | ||
129 | compile_error!(concat!("Unexpected input ", stringify!($($tt)+))) | ||
130 | }; | ||
131 | // final output construction | ||
132 | [@ {} {$($tt:tt)*}] => { &[$($tt)*] as _ }; | ||
133 | // starting matcher | ||
134 | [$($tt:tt),*] => { | ||
135 | attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "forbid", "warn" }) | ||
136 | }; | ||
137 | } | ||
138 | |||
139 | #[rustfmt::skip] | ||
140 | static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| { | ||
141 | use SyntaxKind::*; | ||
142 | std::array::IntoIter::new([ | ||
143 | ( | ||
144 | SOURCE_FILE, | ||
145 | attrs!( | ||
146 | item, | ||
147 | "crate_name", "feature", "no_implicit_prelude", "no_main", "no_std", | ||
148 | "recursion_limit", "type_length_limit", "windows_subsystem" | ||
149 | ), | ||
150 | ), | ||
151 | (MODULE, attrs!(item, "no_implicit_prelude", "path")), | ||
152 | (ITEM_LIST, attrs!(item, "no_implicit_prelude")), | ||
153 | (MACRO_RULES, attrs!(item, "macro_export", "macro_use")), | ||
154 | (MACRO_DEF, attrs!(item)), | ||
155 | (EXTERN_CRATE, attrs!(item, "macro_use", "no_link")), | ||
156 | (USE, attrs!(item)), | ||
157 | (TYPE_ALIAS, attrs!(item)), | ||
158 | (STRUCT, attrs!(item, adt, "non_exhaustive")), | ||
159 | (ENUM, attrs!(item, adt, "non_exhaustive")), | ||
160 | (UNION, attrs!(item, adt)), | ||
161 | (CONST, attrs!(item)), | ||
162 | ( | ||
163 | FN, | ||
164 | attrs!( | ||
165 | item, linkable, | ||
166 | "cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro", | ||
167 | "proc_macro_derive", "proc_macro_attribute", "should_panic", "target_feature", | ||
168 | "test", "track_caller" | ||
169 | ), | ||
170 | ), | ||
171 | (STATIC, attrs!(item, linkable, "global_allocator", "used")), | ||
172 | (TRAIT, attrs!(item, "must_use")), | ||
173 | (IMPL, attrs!(item, "automatically_derived")), | ||
174 | (ASSOC_ITEM_LIST, attrs!(item)), | ||
175 | (EXTERN_BLOCK, attrs!(item, "link")), | ||
176 | (EXTERN_ITEM_LIST, attrs!(item, "link")), | ||
177 | (MACRO_CALL, attrs!()), | ||
178 | (SELF_PARAM, attrs!()), | ||
179 | (PARAM, attrs!()), | ||
180 | (RECORD_FIELD, attrs!()), | ||
181 | (VARIANT, attrs!("non_exhaustive")), | ||
182 | (TYPE_PARAM, attrs!()), | ||
183 | (CONST_PARAM, attrs!()), | ||
184 | (LIFETIME_PARAM, attrs!()), | ||
185 | (LET_STMT, attrs!()), | ||
186 | (EXPR_STMT, attrs!()), | ||
187 | (LITERAL, attrs!()), | ||
188 | (RECORD_EXPR_FIELD_LIST, attrs!()), | ||
189 | (RECORD_EXPR_FIELD, attrs!()), | ||
190 | (MATCH_ARM_LIST, attrs!()), | ||
191 | (MATCH_ARM, attrs!()), | ||
192 | (IDENT_PAT, attrs!()), | ||
193 | (RECORD_PAT_FIELD, attrs!()), | ||
194 | ]) | ||
195 | .collect() | ||
196 | }); | ||
197 | const EXPR_ATTRIBUTES: &[&str] = attrs!(); | ||
198 | |||
86 | /// https://doc.rust-lang.org/reference/attributes.html#built-in-attributes-index | 199 | /// https://doc.rust-lang.org/reference/attributes.html#built-in-attributes-index |
200 | // Keep these sorted for the binary search! | ||
87 | const ATTRIBUTES: &[AttrCompletion] = &[ | 201 | const ATTRIBUTES: &[AttrCompletion] = &[ |
88 | attr("allow(…)", Some("allow"), Some("allow(${0:lint})")), | 202 | attr("allow(…)", Some("allow"), Some("allow(${0:lint})")), |
89 | attr("automatically_derived", None, None), | 203 | attr("automatically_derived", None, None), |
90 | attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")), | ||
91 | attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")), | 204 | attr("cfg(…)", Some("cfg"), Some("cfg(${0:predicate})")), |
205 | attr("cfg_attr(…)", Some("cfg_attr"), Some("cfg_attr(${1:predicate}, ${0:attr})")), | ||
92 | attr("cold", None, None), | 206 | attr("cold", None, None), |
93 | attr(r#"crate_name = """#, Some("crate_name"), Some(r#"crate_name = "${0:crate_name}""#)) | 207 | attr(r#"crate_name = """#, Some("crate_name"), Some(r#"crate_name = "${0:crate_name}""#)) |
94 | .prefer_inner(), | 208 | .prefer_inner(), |
95 | attr("deny(…)", Some("deny"), Some("deny(${0:lint})")), | 209 | attr("deny(…)", Some("deny"), Some("deny(${0:lint})")), |
96 | attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)), | 210 | attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)), |
97 | attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)), | 211 | attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)), |
212 | attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)), | ||
213 | attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)), | ||
214 | attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)), | ||
98 | attr( | 215 | attr( |
99 | r#"export_name = "…""#, | 216 | r#"export_name = "…""#, |
100 | Some("export_name"), | 217 | Some("export_name"), |
101 | Some(r#"export_name = "${0:exported_symbol_name}""#), | 218 | Some(r#"export_name = "${0:exported_symbol_name}""#), |
102 | ), | 219 | ), |
103 | attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)), | ||
104 | attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)), | ||
105 | attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)), | ||
106 | attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(), | 220 | attr("feature(…)", Some("feature"), Some("feature(${0:flag})")).prefer_inner(), |
107 | attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")), | 221 | attr("forbid(…)", Some("forbid"), Some("forbid(${0:lint})")), |
108 | // FIXME: resolve through macro resolution? | 222 | // FIXME: resolve through macro resolution? |
@@ -119,8 +233,8 @@ const ATTRIBUTES: &[AttrCompletion] = &[ | |||
119 | attr("macro_export", None, None), | 233 | attr("macro_export", None, None), |
120 | attr("macro_use", None, None), | 234 | attr("macro_use", None, None), |
121 | attr(r#"must_use"#, Some("must_use"), Some(r#"must_use"#)), | 235 | attr(r#"must_use"#, Some("must_use"), Some(r#"must_use"#)), |
122 | attr("no_link", None, None).prefer_inner(), | ||
123 | attr("no_implicit_prelude", None, None).prefer_inner(), | 236 | attr("no_implicit_prelude", None, None).prefer_inner(), |
237 | attr("no_link", None, None).prefer_inner(), | ||
124 | attr("no_main", None, None).prefer_inner(), | 238 | attr("no_main", None, None).prefer_inner(), |
125 | attr("no_mangle", None, None), | 239 | attr("no_mangle", None, None), |
126 | attr("no_std", None, None).prefer_inner(), | 240 | attr("no_std", None, None).prefer_inner(), |
@@ -153,412 +267,492 @@ const ATTRIBUTES: &[AttrCompletion] = &[ | |||
153 | .prefer_inner(), | 267 | .prefer_inner(), |
154 | ]; | 268 | ]; |
155 | 269 | ||
156 | fn complete_derive(acc: &mut Completions, ctx: &CompletionContext, derive_input: ast::TokenTree) { | 270 | fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Option<FxHashSet<String>> { |
157 | if let Ok(existing_derives) = parse_comma_sep_input(derive_input) { | 271 | let (l_paren, r_paren) = derive_input.l_paren_token().zip(derive_input.r_paren_token())?; |
158 | for derive_completion in DEFAULT_DERIVE_COMPLETIONS | 272 | let mut input_derives = FxHashSet::default(); |
159 | .iter() | 273 | let mut tokens = derive_input |
160 | .filter(|completion| !existing_derives.contains(completion.label)) | 274 | .syntax() |
161 | { | 275 | .children_with_tokens() |
162 | let mut components = vec![derive_completion.label]; | 276 | .filter_map(NodeOrToken::into_token) |
163 | components.extend( | 277 | .skip_while(|token| token != &l_paren) |
164 | derive_completion | 278 | .skip(1) |
165 | .dependencies | 279 | .take_while(|token| token != &r_paren) |
166 | .iter() | 280 | .peekable(); |
167 | .filter(|&&dependency| !existing_derives.contains(dependency)), | 281 | let mut input = String::new(); |
168 | ); | 282 | while tokens.peek().is_some() { |
169 | let lookup = components.join(", "); | 283 | for token in tokens.by_ref().take_while(|t| t.kind() != T![,]) { |
170 | let label = components.iter().rev().join(", "); | 284 | input.push_str(token.text()); |
171 | let mut item = | ||
172 | CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label); | ||
173 | item.lookup_by(lookup).kind(CompletionItemKind::Attribute); | ||
174 | item.add_to(acc); | ||
175 | } | ||
176 | |||
177 | for custom_derive_name in get_derive_names_in_scope(ctx).difference(&existing_derives) { | ||
178 | let mut item = CompletionItem::new( | ||
179 | CompletionKind::Attribute, | ||
180 | ctx.source_range(), | ||
181 | custom_derive_name, | ||
182 | ); | ||
183 | item.kind(CompletionItemKind::Attribute); | ||
184 | item.add_to(acc); | ||
185 | } | 285 | } |
186 | } | ||
187 | } | ||
188 | 286 | ||
189 | fn complete_lint( | 287 | if !input.is_empty() { |
190 | acc: &mut Completions, | 288 | input_derives.insert(input.trim().to_owned()); |
191 | ctx: &CompletionContext, | ||
192 | derive_input: ast::TokenTree, | ||
193 | lints_completions: &[LintCompletion], | ||
194 | ) { | ||
195 | if let Ok(existing_lints) = parse_comma_sep_input(derive_input) { | ||
196 | for lint_completion in lints_completions | ||
197 | .into_iter() | ||
198 | .filter(|completion| !existing_lints.contains(completion.label)) | ||
199 | { | ||
200 | let mut item = CompletionItem::new( | ||
201 | CompletionKind::Attribute, | ||
202 | ctx.source_range(), | ||
203 | lint_completion.label, | ||
204 | ); | ||
205 | item.kind(CompletionItemKind::Attribute).detail(lint_completion.description); | ||
206 | item.add_to(acc) | ||
207 | } | 289 | } |
208 | } | ||
209 | } | ||
210 | |||
211 | fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<String>, ()> { | ||
212 | match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) { | ||
213 | (Some(left_paren), Some(right_paren)) | ||
214 | if left_paren.kind() == T!['('] && right_paren.kind() == T![')'] => | ||
215 | { | ||
216 | let mut input_derives = FxHashSet::default(); | ||
217 | let mut current_derive = String::new(); | ||
218 | for token in derive_input | ||
219 | .syntax() | ||
220 | .children_with_tokens() | ||
221 | .filter_map(|token| token.into_token()) | ||
222 | .skip_while(|token| token != &left_paren) | ||
223 | .skip(1) | ||
224 | .take_while(|token| token != &right_paren) | ||
225 | { | ||
226 | if T![,] == token.kind() { | ||
227 | if !current_derive.is_empty() { | ||
228 | input_derives.insert(current_derive); | ||
229 | current_derive = String::new(); | ||
230 | } | ||
231 | } else { | ||
232 | current_derive.push_str(token.text().trim()); | ||
233 | } | ||
234 | } | ||
235 | 290 | ||
236 | if !current_derive.is_empty() { | 291 | input.clear(); |
237 | input_derives.insert(current_derive); | ||
238 | } | ||
239 | Ok(input_derives) | ||
240 | } | ||
241 | _ => Err(()), | ||
242 | } | 292 | } |
243 | } | ||
244 | 293 | ||
245 | fn get_derive_names_in_scope(ctx: &CompletionContext) -> FxHashSet<String> { | 294 | Some(input_derives) |
246 | let mut result = FxHashSet::default(); | ||
247 | ctx.scope.process_all_names(&mut |name, scope_def| { | ||
248 | if let hir::ScopeDef::MacroDef(mac) = scope_def { | ||
249 | // FIXME kind() doesn't check whether proc-macro is a derive | ||
250 | if mac.kind() == hir::MacroKind::Derive || mac.kind() == hir::MacroKind::ProcMacro { | ||
251 | result.insert(name.to_string()); | ||
252 | } | ||
253 | } | ||
254 | }); | ||
255 | result | ||
256 | } | ||
257 | |||
258 | struct DeriveCompletion { | ||
259 | label: &'static str, | ||
260 | dependencies: &'static [&'static str], | ||
261 | } | 295 | } |
262 | 296 | ||
263 | /// Standard Rust derives and the information about their dependencies | ||
264 | /// (the dependencies are needed so that the main derive don't break the compilation when added) | ||
265 | const DEFAULT_DERIVE_COMPLETIONS: &[DeriveCompletion] = &[ | ||
266 | DeriveCompletion { label: "Clone", dependencies: &[] }, | ||
267 | DeriveCompletion { label: "Copy", dependencies: &["Clone"] }, | ||
268 | DeriveCompletion { label: "Debug", dependencies: &[] }, | ||
269 | DeriveCompletion { label: "Default", dependencies: &[] }, | ||
270 | DeriveCompletion { label: "Hash", dependencies: &[] }, | ||
271 | DeriveCompletion { label: "PartialEq", dependencies: &[] }, | ||
272 | DeriveCompletion { label: "Eq", dependencies: &["PartialEq"] }, | ||
273 | DeriveCompletion { label: "PartialOrd", dependencies: &["PartialEq"] }, | ||
274 | DeriveCompletion { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] }, | ||
275 | ]; | ||
276 | |||
277 | pub(crate) struct LintCompletion { | ||
278 | pub(crate) label: &'static str, | ||
279 | pub(crate) description: &'static str, | ||
280 | } | ||
281 | |||
282 | #[rustfmt::skip] | ||
283 | const DEFAULT_LINT_COMPLETIONS: &[LintCompletion] = &[ | ||
284 | LintCompletion { label: "absolute_paths_not_starting_with_crate", description: r#"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"# }, | ||
285 | LintCompletion { label: "anonymous_parameters", description: r#"detects anonymous parameters"# }, | ||
286 | LintCompletion { label: "box_pointers", description: r#"use of owned (Box type) heap memory"# }, | ||
287 | LintCompletion { label: "deprecated_in_future", description: r#"detects use of items that will be deprecated in a future version"# }, | ||
288 | LintCompletion { label: "elided_lifetimes_in_paths", description: r#"hidden lifetime parameters in types are deprecated"# }, | ||
289 | LintCompletion { label: "explicit_outlives_requirements", description: r#"outlives requirements can be inferred"# }, | ||
290 | LintCompletion { label: "indirect_structural_match", description: r#"pattern with const indirectly referencing non-structural-match type"# }, | ||
291 | LintCompletion { label: "keyword_idents", description: r#"detects edition keywords being used as an identifier"# }, | ||
292 | LintCompletion { label: "macro_use_extern_crate", description: r#"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"# }, | ||
293 | LintCompletion { label: "meta_variable_misuse", description: r#"possible meta-variable misuse at macro definition"# }, | ||
294 | LintCompletion { label: "missing_copy_implementations", description: r#"detects potentially-forgotten implementations of `Copy`"# }, | ||
295 | LintCompletion { label: "missing_crate_level_docs", description: r#"detects crates with no crate-level documentation"# }, | ||
296 | LintCompletion { label: "missing_debug_implementations", description: r#"detects missing implementations of Debug"# }, | ||
297 | LintCompletion { label: "missing_docs", description: r#"detects missing documentation for public members"# }, | ||
298 | LintCompletion { label: "missing_doc_code_examples", description: r#"detects publicly-exported items without code samples in their documentation"# }, | ||
299 | LintCompletion { label: "non_ascii_idents", description: r#"detects non-ASCII identifiers"# }, | ||
300 | LintCompletion { label: "private_doc_tests", description: r#"detects code samples in docs of private items not documented by rustdoc"# }, | ||
301 | LintCompletion { label: "single_use_lifetimes", description: r#"detects lifetime parameters that are only used once"# }, | ||
302 | LintCompletion { label: "trivial_casts", description: r#"detects trivial casts which could be removed"# }, | ||
303 | LintCompletion { label: "trivial_numeric_casts", description: r#"detects trivial casts of numeric types which could be removed"# }, | ||
304 | LintCompletion { label: "unaligned_references", description: r#"detects unaligned references to fields of packed structs"# }, | ||
305 | LintCompletion { label: "unreachable_pub", description: r#"`pub` items not reachable from crate root"# }, | ||
306 | LintCompletion { label: "unsafe_code", description: r#"usage of `unsafe` code"# }, | ||
307 | LintCompletion { label: "unsafe_op_in_unsafe_fn", description: r#"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"# }, | ||
308 | LintCompletion { label: "unstable_features", description: r#"enabling unstable features (deprecated. do not use)"# }, | ||
309 | LintCompletion { label: "unused_crate_dependencies", description: r#"crate dependencies that are never used"# }, | ||
310 | LintCompletion { label: "unused_extern_crates", description: r#"extern crates that are never used"# }, | ||
311 | LintCompletion { label: "unused_import_braces", description: r#"unnecessary braces around an imported item"# }, | ||
312 | LintCompletion { label: "unused_lifetimes", description: r#"detects lifetime parameters that are never used"# }, | ||
313 | LintCompletion { label: "unused_qualifications", description: r#"detects unnecessarily qualified names"# }, | ||
314 | LintCompletion { label: "unused_results", description: r#"unused result of an expression in a statement"# }, | ||
315 | LintCompletion { label: "variant_size_differences", description: r#"detects enums with widely varying variant sizes"# }, | ||
316 | LintCompletion { label: "array_into_iter", description: r#"detects calling `into_iter` on arrays"# }, | ||
317 | LintCompletion { label: "asm_sub_register", description: r#"using only a subset of a register for inline asm inputs"# }, | ||
318 | LintCompletion { label: "bare_trait_objects", description: r#"suggest using `dyn Trait` for trait objects"# }, | ||
319 | LintCompletion { label: "bindings_with_variant_name", description: r#"detects pattern bindings with the same name as one of the matched variants"# }, | ||
320 | LintCompletion { label: "cenum_impl_drop_cast", description: r#"a C-like enum implementing Drop is cast"# }, | ||
321 | LintCompletion { label: "clashing_extern_declarations", description: r#"detects when an extern fn has been declared with the same name but different types"# }, | ||
322 | LintCompletion { label: "coherence_leak_check", description: r#"distinct impls distinguished only by the leak-check code"# }, | ||
323 | LintCompletion { label: "confusable_idents", description: r#"detects visually confusable pairs between identifiers"# }, | ||
324 | LintCompletion { label: "dead_code", description: r#"detect unused, unexported items"# }, | ||
325 | LintCompletion { label: "deprecated", description: r#"detects use of deprecated items"# }, | ||
326 | LintCompletion { label: "ellipsis_inclusive_range_patterns", description: r#"`...` range patterns are deprecated"# }, | ||
327 | LintCompletion { label: "exported_private_dependencies", description: r#"public interface leaks type from a private dependency"# }, | ||
328 | LintCompletion { label: "illegal_floating_point_literal_pattern", description: r#"floating-point literals cannot be used in patterns"# }, | ||
329 | LintCompletion { label: "improper_ctypes", description: r#"proper use of libc types in foreign modules"# }, | ||
330 | LintCompletion { label: "improper_ctypes_definitions", description: r#"proper use of libc types in foreign item definitions"# }, | ||
331 | LintCompletion { label: "incomplete_features", description: r#"incomplete features that may function improperly in some or all cases"# }, | ||
332 | LintCompletion { label: "inline_no_sanitize", description: r#"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"# }, | ||
333 | LintCompletion { label: "intra_doc_link_resolution_failure", description: r#"failures in resolving intra-doc link targets"# }, | ||
334 | LintCompletion { label: "invalid_codeblock_attributes", description: r#"codeblock attribute looks a lot like a known one"# }, | ||
335 | LintCompletion { label: "invalid_value", description: r#"an invalid value is being created (such as a NULL reference)"# }, | ||
336 | LintCompletion { label: "irrefutable_let_patterns", description: r#"detects irrefutable patterns in if-let and while-let statements"# }, | ||
337 | LintCompletion { label: "late_bound_lifetime_arguments", description: r#"detects generic lifetime arguments in path segments with late bound lifetime parameters"# }, | ||
338 | LintCompletion { label: "mixed_script_confusables", description: r#"detects Unicode scripts whose mixed script confusables codepoints are solely used"# }, | ||
339 | LintCompletion { label: "mutable_borrow_reservation_conflict", description: r#"reservation of a two-phased borrow conflicts with other shared borrows"# }, | ||
340 | LintCompletion { label: "non_camel_case_types", description: r#"types, variants, traits and type parameters should have camel case names"# }, | ||
341 | LintCompletion { label: "non_shorthand_field_patterns", description: r#"using `Struct { x: x }` instead of `Struct { x }` in a pattern"# }, | ||
342 | LintCompletion { label: "non_snake_case", description: r#"variables, methods, functions, lifetime parameters and modules should have snake case names"# }, | ||
343 | LintCompletion { label: "non_upper_case_globals", description: r#"static constants should have uppercase identifiers"# }, | ||
344 | LintCompletion { label: "no_mangle_generic_items", description: r#"generic items must be mangled"# }, | ||
345 | LintCompletion { label: "overlapping_patterns", description: r#"detects overlapping patterns"# }, | ||
346 | LintCompletion { label: "path_statements", description: r#"path statements with no effect"# }, | ||
347 | LintCompletion { label: "private_in_public", description: r#"detect private items in public interfaces not caught by the old implementation"# }, | ||
348 | LintCompletion { label: "proc_macro_derive_resolution_fallback", description: r#"detects proc macro derives using inaccessible names from parent modules"# }, | ||
349 | LintCompletion { label: "redundant_semicolons", description: r#"detects unnecessary trailing semicolons"# }, | ||
350 | LintCompletion { label: "renamed_and_removed_lints", description: r#"lints that have been renamed or removed"# }, | ||
351 | LintCompletion { label: "safe_packed_borrows", description: r#"safe borrows of fields of packed structs were erroneously allowed"# }, | ||
352 | LintCompletion { label: "stable_features", description: r#"stable features found in `#[feature]` directive"# }, | ||
353 | LintCompletion { label: "trivial_bounds", description: r#"these bounds don't depend on an type parameters"# }, | ||
354 | LintCompletion { label: "type_alias_bounds", description: r#"bounds in type aliases are not enforced"# }, | ||
355 | LintCompletion { label: "tyvar_behind_raw_pointer", description: r#"raw pointer to an inference variable"# }, | ||
356 | LintCompletion { label: "uncommon_codepoints", description: r#"detects uncommon Unicode codepoints in identifiers"# }, | ||
357 | LintCompletion { label: "unconditional_recursion", description: r#"functions that cannot return without calling themselves"# }, | ||
358 | LintCompletion { label: "unknown_lints", description: r#"unrecognized lint attribute"# }, | ||
359 | LintCompletion { label: "unnameable_test_items", description: r#"detects an item that cannot be named being marked as `#[test_case]`"# }, | ||
360 | LintCompletion { label: "unreachable_code", description: r#"detects unreachable code paths"# }, | ||
361 | LintCompletion { label: "unreachable_patterns", description: r#"detects unreachable patterns"# }, | ||
362 | LintCompletion { label: "unstable_name_collisions", description: r#"detects name collision with an existing but unstable method"# }, | ||
363 | LintCompletion { label: "unused_allocation", description: r#"detects unnecessary allocations that can be eliminated"# }, | ||
364 | LintCompletion { label: "unused_assignments", description: r#"detect assignments that will never be read"# }, | ||
365 | LintCompletion { label: "unused_attributes", description: r#"detects attributes that were not used by the compiler"# }, | ||
366 | LintCompletion { label: "unused_braces", description: r#"unnecessary braces around an expression"# }, | ||
367 | LintCompletion { label: "unused_comparisons", description: r#"comparisons made useless by limits of the types involved"# }, | ||
368 | LintCompletion { label: "unused_doc_comments", description: r#"detects doc comments that aren't used by rustdoc"# }, | ||
369 | LintCompletion { label: "unused_features", description: r#"unused features found in crate-level `#[feature]` directives"# }, | ||
370 | LintCompletion { label: "unused_imports", description: r#"imports that are never used"# }, | ||
371 | LintCompletion { label: "unused_labels", description: r#"detects labels that are never used"# }, | ||
372 | LintCompletion { label: "unused_macros", description: r#"detects macros that were not used"# }, | ||
373 | LintCompletion { label: "unused_must_use", description: r#"unused result of a type flagged as `#[must_use]`"# }, | ||
374 | LintCompletion { label: "unused_mut", description: r#"detect mut variables which don't need to be mutable"# }, | ||
375 | LintCompletion { label: "unused_parens", description: r#"`if`, `match`, `while` and `return` do not need parentheses"# }, | ||
376 | LintCompletion { label: "unused_unsafe", description: r#"unnecessary use of an `unsafe` block"# }, | ||
377 | LintCompletion { label: "unused_variables", description: r#"detect variables which are not used in any way"# }, | ||
378 | LintCompletion { label: "warnings", description: r#"mass-change the level for lints which produce warnings"# }, | ||
379 | LintCompletion { label: "where_clauses_object_safety", description: r#"checks the object safety of where clauses"# }, | ||
380 | LintCompletion { label: "while_true", description: r#"suggest using `loop { }` instead of `while true { }`"# }, | ||
381 | LintCompletion { label: "ambiguous_associated_items", description: r#"ambiguous associated items"# }, | ||
382 | LintCompletion { label: "arithmetic_overflow", description: r#"arithmetic operation overflows"# }, | ||
383 | LintCompletion { label: "conflicting_repr_hints", description: r#"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"# }, | ||
384 | LintCompletion { label: "const_err", description: r#"constant evaluation detected erroneous expression"# }, | ||
385 | LintCompletion { label: "ill_formed_attribute_input", description: r#"ill-formed attribute inputs that were previously accepted and used in practice"# }, | ||
386 | LintCompletion { label: "incomplete_include", description: r#"trailing content in included file"# }, | ||
387 | LintCompletion { label: "invalid_type_param_default", description: r#"type parameter default erroneously allowed in invalid location"# }, | ||
388 | LintCompletion { label: "macro_expanded_macro_exports_accessed_by_absolute_paths", description: r#"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"# }, | ||
389 | LintCompletion { label: "missing_fragment_specifier", description: r#"detects missing fragment specifiers in unused `macro_rules!` patterns"# }, | ||
390 | LintCompletion { label: "mutable_transmutes", description: r#"mutating transmuted &mut T from &T may cause undefined behavior"# }, | ||
391 | LintCompletion { label: "no_mangle_const_items", description: r#"const items will not have their symbols exported"# }, | ||
392 | LintCompletion { label: "order_dependent_trait_objects", description: r#"trait-object types were treated as different depending on marker-trait order"# }, | ||
393 | LintCompletion { label: "overflowing_literals", description: r#"literal out of range for its type"# }, | ||
394 | LintCompletion { label: "patterns_in_fns_without_body", description: r#"patterns in functions without body were erroneously allowed"# }, | ||
395 | LintCompletion { label: "pub_use_of_private_extern_crate", description: r#"detect public re-exports of private extern crates"# }, | ||
396 | LintCompletion { label: "soft_unstable", description: r#"a feature gate that doesn't break dependent crates"# }, | ||
397 | LintCompletion { label: "unconditional_panic", description: r#"operation will cause a panic at runtime"# }, | ||
398 | LintCompletion { label: "unknown_crate_types", description: r#"unknown crate type found in `#[crate_type]` directive"# }, | ||
399 | ]; | ||
400 | |||
401 | #[cfg(test)] | 297 | #[cfg(test)] |
402 | mod tests { | 298 | mod tests { |
299 | use super::*; | ||
300 | |||
403 | use expect_test::{expect, Expect}; | 301 | use expect_test::{expect, Expect}; |
404 | 302 | ||
405 | use crate::{test_utils::completion_list, CompletionKind}; | 303 | use crate::{test_utils::completion_list, CompletionKind}; |
406 | 304 | ||
305 | #[test] | ||
306 | fn attributes_are_sorted() { | ||
307 | let mut attrs = ATTRIBUTES.iter().map(|attr| attr.key()); | ||
308 | let mut prev = attrs.next().unwrap(); | ||
309 | |||
310 | attrs.for_each(|next| { | ||
311 | assert!( | ||
312 | prev < next, | ||
313 | r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#, | ||
314 | prev, | ||
315 | next | ||
316 | ); | ||
317 | prev = next; | ||
318 | }); | ||
319 | } | ||
320 | |||
407 | fn check(ra_fixture: &str, expect: Expect) { | 321 | fn check(ra_fixture: &str, expect: Expect) { |
408 | let actual = completion_list(ra_fixture, CompletionKind::Attribute); | 322 | let actual = completion_list(ra_fixture, CompletionKind::Attribute); |
409 | expect.assert_eq(&actual); | 323 | expect.assert_eq(&actual); |
410 | } | 324 | } |
411 | 325 | ||
412 | #[test] | 326 | #[test] |
413 | fn empty_derive_completion() { | 327 | fn test_attribute_completion_inside_nested_attr() { |
328 | check(r#"#[cfg($0)]"#, expect![[]]) | ||
329 | } | ||
330 | |||
331 | #[test] | ||
332 | fn test_attribute_completion_with_existing_attr() { | ||
414 | check( | 333 | check( |
415 | r#" | 334 | r#"#[no_mangle] #[$0] mcall!();"#, |
416 | #[derive($0)] | ||
417 | struct Test {} | ||
418 | "#, | ||
419 | expect![[r#" | 335 | expect![[r#" |
420 | at Clone | 336 | at allow(…) |
421 | at Clone, Copy | 337 | at cfg(…) |
422 | at Debug | 338 | at cfg_attr(…) |
423 | at Default | 339 | at deny(…) |
424 | at Hash | 340 | at forbid(…) |
425 | at PartialEq | 341 | at warn(…) |
426 | at PartialEq, Eq | 342 | "#]], |
427 | at PartialEq, PartialOrd | 343 | ) |
428 | at PartialEq, Eq, PartialOrd, Ord | 344 | } |
345 | |||
346 | #[test] | ||
347 | fn complete_attribute_on_source_file() { | ||
348 | check( | ||
349 | r#"#![$0]"#, | ||
350 | expect![[r#" | ||
351 | at allow(…) | ||
352 | at cfg(…) | ||
353 | at cfg_attr(…) | ||
354 | at deny(…) | ||
355 | at forbid(…) | ||
356 | at warn(…) | ||
357 | at deprecated | ||
358 | at doc = "…" | ||
359 | at doc(hidden) | ||
360 | at doc(alias = "…") | ||
361 | at must_use | ||
362 | at no_mangle | ||
363 | at crate_name = "" | ||
364 | at feature(…) | ||
365 | at no_implicit_prelude | ||
366 | at no_main | ||
367 | at no_std | ||
368 | at recursion_limit = … | ||
369 | at type_length_limit = … | ||
370 | at windows_subsystem = "…" | ||
429 | "#]], | 371 | "#]], |
430 | ); | 372 | ); |
431 | } | 373 | } |
432 | 374 | ||
433 | #[test] | 375 | #[test] |
434 | fn no_completion_for_incorrect_derive() { | 376 | fn complete_attribute_on_module() { |
435 | check( | 377 | check( |
436 | r#" | 378 | r#"#[$0] mod foo;"#, |
437 | #[derive{$0)] | 379 | expect![[r#" |
438 | struct Test {} | 380 | at allow(…) |
439 | "#, | 381 | at cfg(…) |
440 | expect![[r#""#]], | 382 | at cfg_attr(…) |
441 | ) | 383 | at deny(…) |
384 | at forbid(…) | ||
385 | at warn(…) | ||
386 | at deprecated | ||
387 | at doc = "…" | ||
388 | at doc(hidden) | ||
389 | at doc(alias = "…") | ||
390 | at must_use | ||
391 | at no_mangle | ||
392 | at path = "…" | ||
393 | "#]], | ||
394 | ); | ||
395 | check( | ||
396 | r#"mod foo {#![$0]}"#, | ||
397 | expect![[r#" | ||
398 | at allow(…) | ||
399 | at cfg(…) | ||
400 | at cfg_attr(…) | ||
401 | at deny(…) | ||
402 | at forbid(…) | ||
403 | at warn(…) | ||
404 | at deprecated | ||
405 | at doc = "…" | ||
406 | at doc(hidden) | ||
407 | at doc(alias = "…") | ||
408 | at must_use | ||
409 | at no_mangle | ||
410 | at no_implicit_prelude | ||
411 | "#]], | ||
412 | ); | ||
442 | } | 413 | } |
443 | 414 | ||
444 | #[test] | 415 | #[test] |
445 | fn derive_with_input_completion() { | 416 | fn complete_attribute_on_macro_rules() { |
446 | check( | 417 | check( |
447 | r#" | 418 | r#"#[$0] macro_rules! foo {}"#, |
448 | #[derive(serde::Serialize, PartialEq, $0)] | ||
449 | struct Test {} | ||
450 | "#, | ||
451 | expect![[r#" | 419 | expect![[r#" |
452 | at Clone | 420 | at allow(…) |
453 | at Clone, Copy | 421 | at cfg(…) |
454 | at Debug | 422 | at cfg_attr(…) |
455 | at Default | 423 | at deny(…) |
456 | at Hash | 424 | at forbid(…) |
457 | at Eq | 425 | at warn(…) |
458 | at PartialOrd | 426 | at deprecated |
459 | at Eq, PartialOrd, Ord | 427 | at doc = "…" |
428 | at doc(hidden) | ||
429 | at doc(alias = "…") | ||
430 | at must_use | ||
431 | at no_mangle | ||
432 | at macro_export | ||
433 | at macro_use | ||
460 | "#]], | 434 | "#]], |
461 | ) | 435 | ); |
462 | } | 436 | } |
463 | 437 | ||
464 | #[test] | 438 | #[test] |
465 | fn test_attribute_completion() { | 439 | fn complete_attribute_on_macro_def() { |
466 | check( | 440 | check( |
467 | r#"#[$0]"#, | 441 | r#"#[$0] macro foo {}"#, |
468 | expect![[r#" | 442 | expect![[r#" |
469 | at allow(…) | 443 | at allow(…) |
470 | at automatically_derived | 444 | at cfg(…) |
471 | at cfg_attr(…) | 445 | at cfg_attr(…) |
446 | at deny(…) | ||
447 | at forbid(…) | ||
448 | at warn(…) | ||
449 | at deprecated | ||
450 | at doc = "…" | ||
451 | at doc(hidden) | ||
452 | at doc(alias = "…") | ||
453 | at must_use | ||
454 | at no_mangle | ||
455 | "#]], | ||
456 | ); | ||
457 | } | ||
458 | |||
459 | #[test] | ||
460 | fn complete_attribute_on_extern_crate() { | ||
461 | check( | ||
462 | r#"#[$0] extern crate foo;"#, | ||
463 | expect![[r#" | ||
464 | at allow(…) | ||
472 | at cfg(…) | 465 | at cfg(…) |
473 | at cold | 466 | at cfg_attr(…) |
474 | at deny(…) | 467 | at deny(…) |
468 | at forbid(…) | ||
469 | at warn(…) | ||
475 | at deprecated | 470 | at deprecated |
476 | at derive(…) | 471 | at doc = "…" |
477 | at export_name = "…" | 472 | at doc(hidden) |
478 | at doc(alias = "…") | 473 | at doc(alias = "…") |
474 | at must_use | ||
475 | at no_mangle | ||
476 | at macro_use | ||
477 | "#]], | ||
478 | ); | ||
479 | } | ||
480 | |||
481 | #[test] | ||
482 | fn complete_attribute_on_use() { | ||
483 | check( | ||
484 | r#"#[$0] use foo;"#, | ||
485 | expect![[r#" | ||
486 | at allow(…) | ||
487 | at cfg(…) | ||
488 | at cfg_attr(…) | ||
489 | at deny(…) | ||
490 | at forbid(…) | ||
491 | at warn(…) | ||
492 | at deprecated | ||
479 | at doc = "…" | 493 | at doc = "…" |
480 | at doc(hidden) | 494 | at doc(hidden) |
495 | at doc(alias = "…") | ||
496 | at must_use | ||
497 | at no_mangle | ||
498 | "#]], | ||
499 | ); | ||
500 | } | ||
501 | |||
502 | #[test] | ||
503 | fn complete_attribute_on_type_alias() { | ||
504 | check( | ||
505 | r#"#[$0] type foo = ();"#, | ||
506 | expect![[r#" | ||
507 | at allow(…) | ||
508 | at cfg(…) | ||
509 | at cfg_attr(…) | ||
510 | at deny(…) | ||
481 | at forbid(…) | 511 | at forbid(…) |
482 | at ignore = "…" | 512 | at warn(…) |
483 | at inline | 513 | at deprecated |
484 | at link | 514 | at doc = "…" |
485 | at link_name = "…" | 515 | at doc(hidden) |
486 | at link_section = "…" | 516 | at doc(alias = "…") |
487 | at macro_export | ||
488 | at macro_use | ||
489 | at must_use | 517 | at must_use |
490 | at no_mangle | 518 | at no_mangle |
519 | "#]], | ||
520 | ); | ||
521 | } | ||
522 | |||
523 | #[test] | ||
524 | fn complete_attribute_on_struct() { | ||
525 | check( | ||
526 | r#"#[$0] struct Foo;"#, | ||
527 | expect![[r#" | ||
528 | at allow(…) | ||
529 | at cfg(…) | ||
530 | at cfg_attr(…) | ||
531 | at deny(…) | ||
532 | at forbid(…) | ||
533 | at warn(…) | ||
534 | at deprecated | ||
535 | at doc = "…" | ||
536 | at doc(hidden) | ||
537 | at doc(alias = "…") | ||
538 | at must_use | ||
539 | at no_mangle | ||
540 | at derive(…) | ||
541 | at repr(…) | ||
491 | at non_exhaustive | 542 | at non_exhaustive |
492 | at path = "…" | 543 | "#]], |
493 | at proc_macro | 544 | ); |
494 | at proc_macro_attribute | 545 | } |
495 | at proc_macro_derive(…) | 546 | |
547 | #[test] | ||
548 | fn complete_attribute_on_enum() { | ||
549 | check( | ||
550 | r#"#[$0] enum Foo {}"#, | ||
551 | expect![[r#" | ||
552 | at allow(…) | ||
553 | at cfg(…) | ||
554 | at cfg_attr(…) | ||
555 | at deny(…) | ||
556 | at forbid(…) | ||
557 | at warn(…) | ||
558 | at deprecated | ||
559 | at doc = "…" | ||
560 | at doc(hidden) | ||
561 | at doc(alias = "…") | ||
562 | at must_use | ||
563 | at no_mangle | ||
564 | at derive(…) | ||
496 | at repr(…) | 565 | at repr(…) |
497 | at should_panic | 566 | at non_exhaustive |
498 | at target_feature = "…" | 567 | "#]], |
499 | at test | 568 | ); |
500 | at track_caller | 569 | } |
501 | at used | 570 | |
571 | #[test] | ||
572 | fn complete_attribute_on_const() { | ||
573 | check( | ||
574 | r#"#[$0] const FOO: () = ();"#, | ||
575 | expect![[r#" | ||
576 | at allow(…) | ||
577 | at cfg(…) | ||
578 | at cfg_attr(…) | ||
579 | at deny(…) | ||
580 | at forbid(…) | ||
502 | at warn(…) | 581 | at warn(…) |
582 | at deprecated | ||
583 | at doc = "…" | ||
584 | at doc(hidden) | ||
585 | at doc(alias = "…") | ||
586 | at must_use | ||
587 | at no_mangle | ||
503 | "#]], | 588 | "#]], |
504 | ) | 589 | ); |
505 | } | 590 | } |
506 | 591 | ||
507 | #[test] | 592 | #[test] |
508 | fn test_attribute_completion_inside_nested_attr() { | 593 | fn complete_attribute_on_static() { |
509 | check(r#"#[cfg($0)]"#, expect![[]]) | 594 | check( |
595 | r#"#[$0] static FOO: () = ()"#, | ||
596 | expect![[r#" | ||
597 | at allow(…) | ||
598 | at cfg(…) | ||
599 | at cfg_attr(…) | ||
600 | at deny(…) | ||
601 | at forbid(…) | ||
602 | at warn(…) | ||
603 | at deprecated | ||
604 | at doc = "…" | ||
605 | at doc(hidden) | ||
606 | at doc(alias = "…") | ||
607 | at must_use | ||
608 | at no_mangle | ||
609 | at export_name = "…" | ||
610 | at link_name = "…" | ||
611 | at link_section = "…" | ||
612 | at used | ||
613 | "#]], | ||
614 | ); | ||
510 | } | 615 | } |
511 | 616 | ||
512 | #[test] | 617 | #[test] |
513 | fn test_inner_attribute_completion() { | 618 | fn complete_attribute_on_trait() { |
514 | check( | 619 | check( |
515 | r"#![$0]", | 620 | r#"#[$0] trait Foo {}"#, |
516 | expect![[r#" | 621 | expect![[r#" |
517 | at allow(…) | 622 | at allow(…) |
518 | at automatically_derived | 623 | at cfg(…) |
519 | at cfg_attr(…) | 624 | at cfg_attr(…) |
625 | at deny(…) | ||
626 | at forbid(…) | ||
627 | at warn(…) | ||
628 | at deprecated | ||
629 | at doc = "…" | ||
630 | at doc(hidden) | ||
631 | at doc(alias = "…") | ||
632 | at must_use | ||
633 | at no_mangle | ||
634 | at must_use | ||
635 | "#]], | ||
636 | ); | ||
637 | } | ||
638 | |||
639 | #[test] | ||
640 | fn complete_attribute_on_impl() { | ||
641 | check( | ||
642 | r#"#[$0] impl () {}"#, | ||
643 | expect![[r#" | ||
644 | at allow(…) | ||
520 | at cfg(…) | 645 | at cfg(…) |
521 | at cold | 646 | at cfg_attr(…) |
522 | at crate_name = "" | ||
523 | at deny(…) | 647 | at deny(…) |
648 | at forbid(…) | ||
649 | at warn(…) | ||
524 | at deprecated | 650 | at deprecated |
525 | at derive(…) | 651 | at doc = "…" |
526 | at export_name = "…" | 652 | at doc(hidden) |
527 | at doc(alias = "…") | 653 | at doc(alias = "…") |
654 | at must_use | ||
655 | at no_mangle | ||
656 | at automatically_derived | ||
657 | "#]], | ||
658 | ); | ||
659 | check( | ||
660 | r#"impl () {#![$0]}"#, | ||
661 | expect![[r#" | ||
662 | at allow(…) | ||
663 | at cfg(…) | ||
664 | at cfg_attr(…) | ||
665 | at deny(…) | ||
666 | at forbid(…) | ||
667 | at warn(…) | ||
668 | at deprecated | ||
528 | at doc = "…" | 669 | at doc = "…" |
529 | at doc(hidden) | 670 | at doc(hidden) |
530 | at feature(…) | 671 | at doc(alias = "…") |
672 | at must_use | ||
673 | at no_mangle | ||
674 | "#]], | ||
675 | ); | ||
676 | } | ||
677 | |||
678 | #[test] | ||
679 | fn complete_attribute_on_extern_block() { | ||
680 | check( | ||
681 | r#"#[$0] extern {}"#, | ||
682 | expect![[r#" | ||
683 | at allow(…) | ||
684 | at cfg(…) | ||
685 | at cfg_attr(…) | ||
686 | at deny(…) | ||
531 | at forbid(…) | 687 | at forbid(…) |
532 | at global_allocator | 688 | at warn(…) |
533 | at ignore = "…" | 689 | at deprecated |
534 | at inline | 690 | at doc = "…" |
691 | at doc(hidden) | ||
692 | at doc(alias = "…") | ||
693 | at must_use | ||
694 | at no_mangle | ||
535 | at link | 695 | at link |
536 | at link_name = "…" | 696 | "#]], |
537 | at link_section = "…" | 697 | ); |
538 | at macro_export | 698 | check( |
539 | at macro_use | 699 | r#"extern {#![$0]}"#, |
700 | expect![[r#" | ||
701 | at allow(…) | ||
702 | at cfg(…) | ||
703 | at cfg_attr(…) | ||
704 | at deny(…) | ||
705 | at forbid(…) | ||
706 | at warn(…) | ||
707 | at deprecated | ||
708 | at doc = "…" | ||
709 | at doc(hidden) | ||
710 | at doc(alias = "…") | ||
540 | at must_use | 711 | at must_use |
541 | at no_link | ||
542 | at no_implicit_prelude | ||
543 | at no_main | ||
544 | at no_mangle | 712 | at no_mangle |
545 | at no_std | 713 | at link |
714 | "#]], | ||
715 | ); | ||
716 | } | ||
717 | |||
718 | #[test] | ||
719 | fn complete_attribute_on_variant() { | ||
720 | check( | ||
721 | r#"enum Foo { #[$0] Bar }"#, | ||
722 | expect![[r#" | ||
723 | at allow(…) | ||
724 | at cfg(…) | ||
725 | at cfg_attr(…) | ||
726 | at deny(…) | ||
727 | at forbid(…) | ||
728 | at warn(…) | ||
546 | at non_exhaustive | 729 | at non_exhaustive |
547 | at panic_handler | 730 | "#]], |
548 | at path = "…" | 731 | ); |
549 | at proc_macro | 732 | } |
550 | at proc_macro_attribute | 733 | |
551 | at proc_macro_derive(…) | 734 | #[test] |
552 | at recursion_limit = … | 735 | fn complete_attribute_on_expr() { |
553 | at repr(…) | 736 | check( |
554 | at should_panic | 737 | r#"fn main() { #[$0] foo() }"#, |
555 | at target_feature = "…" | 738 | expect![[r#" |
556 | at test | 739 | at allow(…) |
557 | at track_caller | 740 | at cfg(…) |
558 | at type_length_limit = … | 741 | at cfg_attr(…) |
559 | at used | 742 | at deny(…) |
743 | at forbid(…) | ||
744 | at warn(…) | ||
745 | "#]], | ||
746 | ); | ||
747 | check( | ||
748 | r#"fn main() { #[$0] foo(); }"#, | ||
749 | expect![[r#" | ||
750 | at allow(…) | ||
751 | at cfg(…) | ||
752 | at cfg_attr(…) | ||
753 | at deny(…) | ||
754 | at forbid(…) | ||
560 | at warn(…) | 755 | at warn(…) |
561 | at windows_subsystem = "…" | ||
562 | "#]], | 756 | "#]], |
563 | ); | 757 | ); |
564 | } | 758 | } |
diff --git a/crates/ide_completion/src/completions/attribute/derive.rs b/crates/ide_completion/src/completions/attribute/derive.rs new file mode 100644 index 000000000..0bc3eab98 --- /dev/null +++ b/crates/ide_completion/src/completions/attribute/derive.rs | |||
@@ -0,0 +1,147 @@ | |||
1 | //! Completion for derives | ||
2 | use itertools::Itertools; | ||
3 | use rustc_hash::FxHashSet; | ||
4 | use syntax::ast; | ||
5 | |||
6 | use crate::{ | ||
7 | context::CompletionContext, | ||
8 | item::{CompletionItem, CompletionItemKind, CompletionKind}, | ||
9 | Completions, | ||
10 | }; | ||
11 | |||
12 | pub(super) fn complete_derive( | ||
13 | acc: &mut Completions, | ||
14 | ctx: &CompletionContext, | ||
15 | derive_input: ast::TokenTree, | ||
16 | ) { | ||
17 | if let Some(existing_derives) = super::parse_comma_sep_input(derive_input) { | ||
18 | for derive_completion in DEFAULT_DERIVE_COMPLETIONS | ||
19 | .iter() | ||
20 | .filter(|completion| !existing_derives.contains(completion.label)) | ||
21 | { | ||
22 | let mut components = vec![derive_completion.label]; | ||
23 | components.extend( | ||
24 | derive_completion | ||
25 | .dependencies | ||
26 | .iter() | ||
27 | .filter(|&&dependency| !existing_derives.contains(dependency)), | ||
28 | ); | ||
29 | let lookup = components.join(", "); | ||
30 | let label = components.iter().rev().join(", "); | ||
31 | let mut item = | ||
32 | CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label); | ||
33 | item.lookup_by(lookup).kind(CompletionItemKind::Attribute); | ||
34 | item.add_to(acc); | ||
35 | } | ||
36 | |||
37 | for custom_derive_name in get_derive_names_in_scope(ctx).difference(&existing_derives) { | ||
38 | let mut item = CompletionItem::new( | ||
39 | CompletionKind::Attribute, | ||
40 | ctx.source_range(), | ||
41 | custom_derive_name, | ||
42 | ); | ||
43 | item.kind(CompletionItemKind::Attribute); | ||
44 | item.add_to(acc); | ||
45 | } | ||
46 | } | ||
47 | } | ||
48 | |||
49 | fn get_derive_names_in_scope(ctx: &CompletionContext) -> FxHashSet<String> { | ||
50 | let mut result = FxHashSet::default(); | ||
51 | ctx.scope.process_all_names(&mut |name, scope_def| { | ||
52 | if let hir::ScopeDef::MacroDef(mac) = scope_def { | ||
53 | if mac.kind() == hir::MacroKind::Derive { | ||
54 | result.insert(name.to_string()); | ||
55 | } | ||
56 | } | ||
57 | }); | ||
58 | result | ||
59 | } | ||
60 | |||
61 | struct DeriveCompletion { | ||
62 | label: &'static str, | ||
63 | dependencies: &'static [&'static str], | ||
64 | } | ||
65 | |||
66 | /// Standard Rust derives and the information about their dependencies | ||
67 | /// (the dependencies are needed so that the main derive don't break the compilation when added) | ||
68 | const DEFAULT_DERIVE_COMPLETIONS: &[DeriveCompletion] = &[ | ||
69 | DeriveCompletion { label: "Clone", dependencies: &[] }, | ||
70 | DeriveCompletion { label: "Copy", dependencies: &["Clone"] }, | ||
71 | DeriveCompletion { label: "Debug", dependencies: &[] }, | ||
72 | DeriveCompletion { label: "Default", dependencies: &[] }, | ||
73 | DeriveCompletion { label: "Hash", dependencies: &[] }, | ||
74 | DeriveCompletion { label: "PartialEq", dependencies: &[] }, | ||
75 | DeriveCompletion { label: "Eq", dependencies: &["PartialEq"] }, | ||
76 | DeriveCompletion { label: "PartialOrd", dependencies: &["PartialEq"] }, | ||
77 | DeriveCompletion { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] }, | ||
78 | ]; | ||
79 | |||
80 | #[cfg(test)] | ||
81 | mod tests { | ||
82 | use expect_test::{expect, Expect}; | ||
83 | |||
84 | use crate::{test_utils::completion_list, CompletionKind}; | ||
85 | |||
86 | fn check(ra_fixture: &str, expect: Expect) { | ||
87 | let actual = completion_list(ra_fixture, CompletionKind::Attribute); | ||
88 | expect.assert_eq(&actual); | ||
89 | } | ||
90 | |||
91 | #[test] | ||
92 | fn no_completion_for_incorrect_derive() { | ||
93 | check(r#"#[derive{$0)] struct Test;"#, expect![[]]) | ||
94 | } | ||
95 | |||
96 | #[test] | ||
97 | fn empty_derive() { | ||
98 | check( | ||
99 | r#"#[derive($0)] struct Test;"#, | ||
100 | expect![[r#" | ||
101 | at Clone | ||
102 | at Clone, Copy | ||
103 | at Debug | ||
104 | at Default | ||
105 | at Hash | ||
106 | at PartialEq | ||
107 | at PartialEq, Eq | ||
108 | at PartialEq, PartialOrd | ||
109 | at PartialEq, Eq, PartialOrd, Ord | ||
110 | "#]], | ||
111 | ); | ||
112 | } | ||
113 | |||
114 | #[test] | ||
115 | fn derive_with_input() { | ||
116 | check( | ||
117 | r#"#[derive(serde::Serialize, PartialEq, $0)] struct Test;"#, | ||
118 | expect![[r#" | ||
119 | at Clone | ||
120 | at Clone, Copy | ||
121 | at Debug | ||
122 | at Default | ||
123 | at Hash | ||
124 | at Eq | ||
125 | at PartialOrd | ||
126 | at Eq, PartialOrd, Ord | ||
127 | "#]], | ||
128 | ) | ||
129 | } | ||
130 | |||
131 | #[test] | ||
132 | fn derive_with_input2() { | ||
133 | check( | ||
134 | r#"#[derive($0 serde::Serialize, PartialEq)] struct Test;"#, | ||
135 | expect![[r#" | ||
136 | at Clone | ||
137 | at Clone, Copy | ||
138 | at Debug | ||
139 | at Default | ||
140 | at Hash | ||
141 | at Eq | ||
142 | at PartialOrd | ||
143 | at Eq, PartialOrd, Ord | ||
144 | "#]], | ||
145 | ) | ||
146 | } | ||
147 | } | ||
diff --git a/crates/ide_completion/src/completions/attribute/lint.rs b/crates/ide_completion/src/completions/attribute/lint.rs new file mode 100644 index 000000000..403630dce --- /dev/null +++ b/crates/ide_completion/src/completions/attribute/lint.rs | |||
@@ -0,0 +1,187 @@ | |||
1 | //! Completion for lints | ||
2 | use syntax::ast; | ||
3 | |||
4 | use crate::{ | ||
5 | context::CompletionContext, | ||
6 | item::{CompletionItem, CompletionItemKind, CompletionKind}, | ||
7 | Completions, | ||
8 | }; | ||
9 | |||
10 | pub(super) fn complete_lint( | ||
11 | acc: &mut Completions, | ||
12 | ctx: &CompletionContext, | ||
13 | derive_input: ast::TokenTree, | ||
14 | lints_completions: &[LintCompletion], | ||
15 | ) { | ||
16 | if let Some(existing_lints) = super::parse_comma_sep_input(derive_input) { | ||
17 | for lint_completion in lints_completions | ||
18 | .into_iter() | ||
19 | .filter(|completion| !existing_lints.contains(completion.label)) | ||
20 | { | ||
21 | let mut item = CompletionItem::new( | ||
22 | CompletionKind::Attribute, | ||
23 | ctx.source_range(), | ||
24 | lint_completion.label, | ||
25 | ); | ||
26 | item.kind(CompletionItemKind::Attribute).detail(lint_completion.description); | ||
27 | item.add_to(acc) | ||
28 | } | ||
29 | } | ||
30 | } | ||
31 | |||
/// A single lint candidate offered by lint-attribute completion.
pub(crate) struct LintCompletion {
    /// Lint name exactly as written in source, e.g. `dead_code`.
    pub(crate) label: &'static str,
    /// Human-readable explanation, shown as the completion item's detail.
    pub(crate) description: &'static str,
}
36 | |||
37 | #[rustfmt::skip] | ||
38 | pub(super) const DEFAULT_LINT_COMPLETIONS: &[LintCompletion] = &[ | ||
39 | LintCompletion { label: "absolute_paths_not_starting_with_crate", description: r#"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"# }, | ||
40 | LintCompletion { label: "anonymous_parameters", description: r#"detects anonymous parameters"# }, | ||
41 | LintCompletion { label: "box_pointers", description: r#"use of owned (Box type) heap memory"# }, | ||
42 | LintCompletion { label: "deprecated_in_future", description: r#"detects use of items that will be deprecated in a future version"# }, | ||
43 | LintCompletion { label: "elided_lifetimes_in_paths", description: r#"hidden lifetime parameters in types are deprecated"# }, | ||
44 | LintCompletion { label: "explicit_outlives_requirements", description: r#"outlives requirements can be inferred"# }, | ||
45 | LintCompletion { label: "indirect_structural_match", description: r#"pattern with const indirectly referencing non-structural-match type"# }, | ||
46 | LintCompletion { label: "keyword_idents", description: r#"detects edition keywords being used as an identifier"# }, | ||
47 | LintCompletion { label: "macro_use_extern_crate", description: r#"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"# }, | ||
48 | LintCompletion { label: "meta_variable_misuse", description: r#"possible meta-variable misuse at macro definition"# }, | ||
49 | LintCompletion { label: "missing_copy_implementations", description: r#"detects potentially-forgotten implementations of `Copy`"# }, | ||
50 | LintCompletion { label: "missing_crate_level_docs", description: r#"detects crates with no crate-level documentation"# }, | ||
51 | LintCompletion { label: "missing_debug_implementations", description: r#"detects missing implementations of Debug"# }, | ||
52 | LintCompletion { label: "missing_docs", description: r#"detects missing documentation for public members"# }, | ||
53 | LintCompletion { label: "missing_doc_code_examples", description: r#"detects publicly-exported items without code samples in their documentation"# }, | ||
54 | LintCompletion { label: "non_ascii_idents", description: r#"detects non-ASCII identifiers"# }, | ||
55 | LintCompletion { label: "private_doc_tests", description: r#"detects code samples in docs of private items not documented by rustdoc"# }, | ||
56 | LintCompletion { label: "single_use_lifetimes", description: r#"detects lifetime parameters that are only used once"# }, | ||
57 | LintCompletion { label: "trivial_casts", description: r#"detects trivial casts which could be removed"# }, | ||
58 | LintCompletion { label: "trivial_numeric_casts", description: r#"detects trivial casts of numeric types which could be removed"# }, | ||
59 | LintCompletion { label: "unaligned_references", description: r#"detects unaligned references to fields of packed structs"# }, | ||
60 | LintCompletion { label: "unreachable_pub", description: r#"`pub` items not reachable from crate root"# }, | ||
61 | LintCompletion { label: "unsafe_code", description: r#"usage of `unsafe` code"# }, | ||
62 | LintCompletion { label: "unsafe_op_in_unsafe_fn", description: r#"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"# }, | ||
63 | LintCompletion { label: "unstable_features", description: r#"enabling unstable features (deprecated. do not use)"# }, | ||
64 | LintCompletion { label: "unused_crate_dependencies", description: r#"crate dependencies that are never used"# }, | ||
65 | LintCompletion { label: "unused_extern_crates", description: r#"extern crates that are never used"# }, | ||
66 | LintCompletion { label: "unused_import_braces", description: r#"unnecessary braces around an imported item"# }, | ||
67 | LintCompletion { label: "unused_lifetimes", description: r#"detects lifetime parameters that are never used"# }, | ||
68 | LintCompletion { label: "unused_qualifications", description: r#"detects unnecessarily qualified names"# }, | ||
69 | LintCompletion { label: "unused_results", description: r#"unused result of an expression in a statement"# }, | ||
70 | LintCompletion { label: "variant_size_differences", description: r#"detects enums with widely varying variant sizes"# }, | ||
71 | LintCompletion { label: "array_into_iter", description: r#"detects calling `into_iter` on arrays"# }, | ||
72 | LintCompletion { label: "asm_sub_register", description: r#"using only a subset of a register for inline asm inputs"# }, | ||
73 | LintCompletion { label: "bare_trait_objects", description: r#"suggest using `dyn Trait` for trait objects"# }, | ||
74 | LintCompletion { label: "bindings_with_variant_name", description: r#"detects pattern bindings with the same name as one of the matched variants"# }, | ||
75 | LintCompletion { label: "cenum_impl_drop_cast", description: r#"a C-like enum implementing Drop is cast"# }, | ||
76 | LintCompletion { label: "clashing_extern_declarations", description: r#"detects when an extern fn has been declared with the same name but different types"# }, | ||
77 | LintCompletion { label: "coherence_leak_check", description: r#"distinct impls distinguished only by the leak-check code"# }, | ||
78 | LintCompletion { label: "confusable_idents", description: r#"detects visually confusable pairs between identifiers"# }, | ||
79 | LintCompletion { label: "dead_code", description: r#"detect unused, unexported items"# }, | ||
80 | LintCompletion { label: "deprecated", description: r#"detects use of deprecated items"# }, | ||
81 | LintCompletion { label: "ellipsis_inclusive_range_patterns", description: r#"`...` range patterns are deprecated"# }, | ||
82 | LintCompletion { label: "exported_private_dependencies", description: r#"public interface leaks type from a private dependency"# }, | ||
83 | LintCompletion { label: "illegal_floating_point_literal_pattern", description: r#"floating-point literals cannot be used in patterns"# }, | ||
84 | LintCompletion { label: "improper_ctypes", description: r#"proper use of libc types in foreign modules"# }, | ||
85 | LintCompletion { label: "improper_ctypes_definitions", description: r#"proper use of libc types in foreign item definitions"# }, | ||
86 | LintCompletion { label: "incomplete_features", description: r#"incomplete features that may function improperly in some or all cases"# }, | ||
87 | LintCompletion { label: "inline_no_sanitize", description: r#"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"# }, | ||
88 | LintCompletion { label: "intra_doc_link_resolution_failure", description: r#"failures in resolving intra-doc link targets"# }, | ||
89 | LintCompletion { label: "invalid_codeblock_attributes", description: r#"codeblock attribute looks a lot like a known one"# }, | ||
90 | LintCompletion { label: "invalid_value", description: r#"an invalid value is being created (such as a NULL reference)"# }, | ||
91 | LintCompletion { label: "irrefutable_let_patterns", description: r#"detects irrefutable patterns in if-let and while-let statements"# }, | ||
92 | LintCompletion { label: "late_bound_lifetime_arguments", description: r#"detects generic lifetime arguments in path segments with late bound lifetime parameters"# }, | ||
93 | LintCompletion { label: "mixed_script_confusables", description: r#"detects Unicode scripts whose mixed script confusables codepoints are solely used"# }, | ||
94 | LintCompletion { label: "mutable_borrow_reservation_conflict", description: r#"reservation of a two-phased borrow conflicts with other shared borrows"# }, | ||
95 | LintCompletion { label: "non_camel_case_types", description: r#"types, variants, traits and type parameters should have camel case names"# }, | ||
96 | LintCompletion { label: "non_shorthand_field_patterns", description: r#"using `Struct { x: x }` instead of `Struct { x }` in a pattern"# }, | ||
97 | LintCompletion { label: "non_snake_case", description: r#"variables, methods, functions, lifetime parameters and modules should have snake case names"# }, | ||
98 | LintCompletion { label: "non_upper_case_globals", description: r#"static constants should have uppercase identifiers"# }, | ||
99 | LintCompletion { label: "no_mangle_generic_items", description: r#"generic items must be mangled"# }, | ||
100 | LintCompletion { label: "overlapping_patterns", description: r#"detects overlapping patterns"# }, | ||
101 | LintCompletion { label: "path_statements", description: r#"path statements with no effect"# }, | ||
102 | LintCompletion { label: "private_in_public", description: r#"detect private items in public interfaces not caught by the old implementation"# }, | ||
103 | LintCompletion { label: "proc_macro_derive_resolution_fallback", description: r#"detects proc macro derives using inaccessible names from parent modules"# }, | ||
104 | LintCompletion { label: "redundant_semicolons", description: r#"detects unnecessary trailing semicolons"# }, | ||
105 | LintCompletion { label: "renamed_and_removed_lints", description: r#"lints that have been renamed or removed"# }, | ||
106 | LintCompletion { label: "safe_packed_borrows", description: r#"safe borrows of fields of packed structs were erroneously allowed"# }, | ||
107 | LintCompletion { label: "stable_features", description: r#"stable features found in `#[feature]` directive"# }, | ||
108 | LintCompletion { label: "trivial_bounds", description: r#"these bounds don't depend on an type parameters"# }, | ||
109 | LintCompletion { label: "type_alias_bounds", description: r#"bounds in type aliases are not enforced"# }, | ||
110 | LintCompletion { label: "tyvar_behind_raw_pointer", description: r#"raw pointer to an inference variable"# }, | ||
111 | LintCompletion { label: "uncommon_codepoints", description: r#"detects uncommon Unicode codepoints in identifiers"# }, | ||
112 | LintCompletion { label: "unconditional_recursion", description: r#"functions that cannot return without calling themselves"# }, | ||
113 | LintCompletion { label: "unknown_lints", description: r#"unrecognized lint attribute"# }, | ||
114 | LintCompletion { label: "unnameable_test_items", description: r#"detects an item that cannot be named being marked as `#[test_case]`"# }, | ||
115 | LintCompletion { label: "unreachable_code", description: r#"detects unreachable code paths"# }, | ||
116 | LintCompletion { label: "unreachable_patterns", description: r#"detects unreachable patterns"# }, | ||
117 | LintCompletion { label: "unstable_name_collisions", description: r#"detects name collision with an existing but unstable method"# }, | ||
118 | LintCompletion { label: "unused_allocation", description: r#"detects unnecessary allocations that can be eliminated"# }, | ||
119 | LintCompletion { label: "unused_assignments", description: r#"detect assignments that will never be read"# }, | ||
120 | LintCompletion { label: "unused_attributes", description: r#"detects attributes that were not used by the compiler"# }, | ||
121 | LintCompletion { label: "unused_braces", description: r#"unnecessary braces around an expression"# }, | ||
122 | LintCompletion { label: "unused_comparisons", description: r#"comparisons made useless by limits of the types involved"# }, | ||
123 | LintCompletion { label: "unused_doc_comments", description: r#"detects doc comments that aren't used by rustdoc"# }, | ||
124 | LintCompletion { label: "unused_features", description: r#"unused features found in crate-level `#[feature]` directives"# }, | ||
125 | LintCompletion { label: "unused_imports", description: r#"imports that are never used"# }, | ||
126 | LintCompletion { label: "unused_labels", description: r#"detects labels that are never used"# }, | ||
127 | LintCompletion { label: "unused_macros", description: r#"detects macros that were not used"# }, | ||
128 | LintCompletion { label: "unused_must_use", description: r#"unused result of a type flagged as `#[must_use]`"# }, | ||
129 | LintCompletion { label: "unused_mut", description: r#"detect mut variables which don't need to be mutable"# }, | ||
130 | LintCompletion { label: "unused_parens", description: r#"`if`, `match`, `while` and `return` do not need parentheses"# }, | ||
131 | LintCompletion { label: "unused_unsafe", description: r#"unnecessary use of an `unsafe` block"# }, | ||
132 | LintCompletion { label: "unused_variables", description: r#"detect variables which are not used in any way"# }, | ||
133 | LintCompletion { label: "warnings", description: r#"mass-change the level for lints which produce warnings"# }, | ||
134 | LintCompletion { label: "where_clauses_object_safety", description: r#"checks the object safety of where clauses"# }, | ||
135 | LintCompletion { label: "while_true", description: r#"suggest using `loop { }` instead of `while true { }`"# }, | ||
136 | LintCompletion { label: "ambiguous_associated_items", description: r#"ambiguous associated items"# }, | ||
137 | LintCompletion { label: "arithmetic_overflow", description: r#"arithmetic operation overflows"# }, | ||
138 | LintCompletion { label: "conflicting_repr_hints", description: r#"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"# }, | ||
139 | LintCompletion { label: "const_err", description: r#"constant evaluation detected erroneous expression"# }, | ||
140 | LintCompletion { label: "ill_formed_attribute_input", description: r#"ill-formed attribute inputs that were previously accepted and used in practice"# }, | ||
141 | LintCompletion { label: "incomplete_include", description: r#"trailing content in included file"# }, | ||
142 | LintCompletion { label: "invalid_type_param_default", description: r#"type parameter default erroneously allowed in invalid location"# }, | ||
143 | LintCompletion { label: "macro_expanded_macro_exports_accessed_by_absolute_paths", description: r#"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"# }, | ||
144 | LintCompletion { label: "missing_fragment_specifier", description: r#"detects missing fragment specifiers in unused `macro_rules!` patterns"# }, | ||
145 | LintCompletion { label: "mutable_transmutes", description: r#"mutating transmuted &mut T from &T may cause undefined behavior"# }, | ||
146 | LintCompletion { label: "no_mangle_const_items", description: r#"const items will not have their symbols exported"# }, | ||
147 | LintCompletion { label: "order_dependent_trait_objects", description: r#"trait-object types were treated as different depending on marker-trait order"# }, | ||
148 | LintCompletion { label: "overflowing_literals", description: r#"literal out of range for its type"# }, | ||
149 | LintCompletion { label: "patterns_in_fns_without_body", description: r#"patterns in functions without body were erroneously allowed"# }, | ||
150 | LintCompletion { label: "pub_use_of_private_extern_crate", description: r#"detect public re-exports of private extern crates"# }, | ||
151 | LintCompletion { label: "soft_unstable", description: r#"a feature gate that doesn't break dependent crates"# }, | ||
152 | LintCompletion { label: "unconditional_panic", description: r#"operation will cause a panic at runtime"# }, | ||
153 | LintCompletion { label: "unknown_crate_types", description: r#"unknown crate type found in `#[crate_type]` directive"# }, | ||
154 | ]; | ||
155 | |||
#[cfg(test)]
mod tests {

    use crate::test_utils::check_edit;

    // Completing into an empty `#[allow(..)]` list inserts the lint as-is.
    #[test]
    fn check_empty() {
        check_edit(
            "deprecated",
            r#"#[allow($0)] struct Test;"#,
            r#"#[allow(deprecated)] struct Test;"#,
        )
    }

    // With a lint already present, the new lint is appended after it.
    #[test]
    fn check_with_existing() {
        check_edit(
            "deprecated",
            r#"#[allow(keyword_idents, $0)] struct Test;"#,
            r#"#[allow(keyword_idents, deprecated)] struct Test;"#,
        )
    }

    // NOTE(review): this test is byte-for-byte identical to `check_with_existing`
    // and does not actually exercise a qualified lint path (e.g. `clippy::...`)
    // as its name suggests — TODO: replace the fixture with one containing a
    // qualified name so the test matches its intent.
    #[test]
    fn check_qualified() {
        check_edit(
            "deprecated",
            r#"#[allow(keyword_idents, $0)] struct Test;"#,
            r#"#[allow(keyword_idents, deprecated)] struct Test;"#,
        )
    }
}
diff --git a/crates/ide_completion/src/completions/flyimport.rs b/crates/ide_completion/src/completions/flyimport.rs index 9d5b61562..df27e7a84 100644 --- a/crates/ide_completion/src/completions/flyimport.rs +++ b/crates/ide_completion/src/completions/flyimport.rs | |||
@@ -111,11 +111,9 @@ pub(crate) fn import_on_the_fly(acc: &mut Completions, ctx: &CompletionContext) | |||
111 | return None; | 111 | return None; |
112 | } | 112 | } |
113 | if ctx.use_item_syntax.is_some() | 113 | if ctx.use_item_syntax.is_some() |
114 | || ctx.attribute_under_caret.is_some() | 114 | || ctx.is_path_disallowed() |
115 | || ctx.mod_declaration_under_caret.is_some() | 115 | || ctx.expects_item() |
116 | || ctx.record_lit_syntax.is_some() | 116 | || ctx.expects_assoc_item() |
117 | || ctx.has_trait_parent | ||
118 | || ctx.has_impl_parent | ||
119 | { | 117 | { |
120 | return None; | 118 | return None; |
121 | } | 119 | } |
diff --git a/crates/ide_completion/src/completions/keyword.rs b/crates/ide_completion/src/completions/keyword.rs index 61b667104..e71a04b6e 100644 --- a/crates/ide_completion/src/completions/keyword.rs +++ b/crates/ide_completion/src/completions/keyword.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | 2 | ||
3 | use std::iter; | 3 | use std::iter; |
4 | 4 | ||
5 | use syntax::SyntaxKind; | 5 | use syntax::{SyntaxKind, T}; |
6 | 6 | ||
7 | use crate::{CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions}; | 7 | use crate::{CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions}; |
8 | 8 | ||
@@ -48,107 +48,102 @@ pub(crate) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte | |||
48 | cov_mark::hit!(no_keyword_completion_in_record_lit); | 48 | cov_mark::hit!(no_keyword_completion_in_record_lit); |
49 | return; | 49 | return; |
50 | } | 50 | } |
51 | let mut add_keyword = |kw, snippet| add_keyword(ctx, acc, kw, snippet); | ||
51 | 52 | ||
52 | let has_trait_or_impl_parent = ctx.has_impl_parent || ctx.has_trait_parent; | 53 | let expects_assoc_item = ctx.expects_assoc_item(); |
53 | if ctx.trait_as_prev_sibling || ctx.impl_as_prev_sibling { | 54 | let has_block_expr_parent = ctx.has_block_expr_parent(); |
54 | add_keyword(ctx, acc, "where", "where "); | 55 | let expects_item = ctx.expects_item(); |
56 | |||
57 | if ctx.has_impl_or_trait_prev_sibling() { | ||
58 | // FIXME this also incorrectly shows up after a complete trait/impl | ||
59 | add_keyword("where", "where "); | ||
55 | return; | 60 | return; |
56 | } | 61 | } |
57 | if ctx.unsafe_is_prev { | 62 | if ctx.previous_token_is(T![unsafe]) { |
58 | if ctx.has_item_list_or_source_file_parent || ctx.block_expr_parent { | 63 | if expects_item || expects_assoc_item || has_block_expr_parent { |
59 | add_keyword(ctx, acc, "fn", "fn $0() {}") | 64 | add_keyword("fn", "fn $1($2) {\n $0\n}") |
60 | } | 65 | } |
61 | 66 | ||
62 | if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { | 67 | if expects_item || has_block_expr_parent { |
63 | add_keyword(ctx, acc, "trait", "trait $0 {}"); | 68 | add_keyword("trait", "trait $1 {\n $0\n}"); |
64 | add_keyword(ctx, acc, "impl", "impl $0 {}"); | 69 | add_keyword("impl", "impl $1 {\n $0\n}"); |
65 | } | 70 | } |
66 | 71 | ||
67 | return; | 72 | return; |
68 | } | 73 | } |
69 | if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent | ||
70 | { | ||
71 | add_keyword(ctx, acc, "fn", "fn $0() {}"); | ||
72 | } | ||
73 | if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { | ||
74 | add_keyword(ctx, acc, "use", "use "); | ||
75 | add_keyword(ctx, acc, "impl", "impl $0 {}"); | ||
76 | add_keyword(ctx, acc, "trait", "trait $0 {}"); | ||
77 | } | ||
78 | 74 | ||
79 | if ctx.has_item_list_or_source_file_parent { | 75 | if expects_item || ctx.expects_non_trait_assoc_item() || ctx.expect_record_field() { |
80 | add_keyword(ctx, acc, "enum", "enum $0 {}"); | 76 | add_keyword("pub(crate)", "pub(crate) "); |
81 | add_keyword(ctx, acc, "struct", "struct $0"); | 77 | add_keyword("pub", "pub "); |
82 | add_keyword(ctx, acc, "union", "union $0 {}"); | ||
83 | } | 78 | } |
84 | 79 | ||
85 | if ctx.is_expr { | 80 | if expects_item || expects_assoc_item || has_block_expr_parent || ctx.is_match_arm { |
86 | add_keyword(ctx, acc, "match", "match $0 {}"); | 81 | add_keyword("unsafe", "unsafe "); |
87 | add_keyword(ctx, acc, "while", "while $0 {}"); | ||
88 | add_keyword(ctx, acc, "while let", "while let $1 = $0 {}"); | ||
89 | add_keyword(ctx, acc, "loop", "loop {$0}"); | ||
90 | add_keyword(ctx, acc, "if", "if $0 {}"); | ||
91 | add_keyword(ctx, acc, "if let", "if let $1 = $0 {}"); | ||
92 | add_keyword(ctx, acc, "for", "for $1 in $0 {}"); | ||
93 | } | 82 | } |
94 | 83 | ||
95 | if ctx.if_is_prev || ctx.block_expr_parent { | 84 | if expects_item || expects_assoc_item || has_block_expr_parent { |
96 | add_keyword(ctx, acc, "let", "let "); | 85 | add_keyword("fn", "fn $1($2) {\n $0\n}"); |
86 | add_keyword("const", "const $0"); | ||
87 | add_keyword("type", "type $0"); | ||
97 | } | 88 | } |
98 | 89 | ||
99 | if ctx.after_if { | 90 | if expects_item || has_block_expr_parent { |
100 | add_keyword(ctx, acc, "else", "else {$0}"); | 91 | add_keyword("use", "use $0"); |
101 | add_keyword(ctx, acc, "else if", "else if $0 {}"); | 92 | add_keyword("impl", "impl $1 {\n $0\n}"); |
93 | add_keyword("trait", "trait $1 {\n $0\n}"); | ||
94 | add_keyword("static", "static $0"); | ||
95 | add_keyword("extern", "extern $0"); | ||
96 | add_keyword("mod", "mod $0"); | ||
102 | } | 97 | } |
103 | if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { | 98 | |
104 | add_keyword(ctx, acc, "mod", "mod $0"); | 99 | if expects_item { |
100 | add_keyword("enum", "enum $1 {\n $0\n}"); | ||
101 | add_keyword("struct", "struct $0"); | ||
102 | add_keyword("union", "union $1 {\n $0\n}"); | ||
105 | } | 103 | } |
106 | if ctx.bind_pat_parent || ctx.ref_pat_parent { | 104 | |
107 | add_keyword(ctx, acc, "mut", "mut "); | 105 | if ctx.expects_expression() { |
106 | add_keyword("match", "match $1 {\n $0\n}"); | ||
107 | add_keyword("while", "while $1 {\n $0\n}"); | ||
108 | add_keyword("while let", "while let $1 = $2 {\n $0\n}"); | ||
109 | add_keyword("loop", "loop {\n $0\n}"); | ||
110 | add_keyword("if", "if $1 {\n $0\n}"); | ||
111 | add_keyword("if let", "if let $1 = $2 {\n $0\n}"); | ||
112 | add_keyword("for", "for $1 in $2 {\n $0\n}"); | ||
108 | } | 113 | } |
109 | if ctx.has_item_list_or_source_file_parent || has_trait_or_impl_parent || ctx.block_expr_parent | 114 | |
110 | { | 115 | if ctx.previous_token_is(T![if]) || ctx.previous_token_is(T![while]) || has_block_expr_parent { |
111 | add_keyword(ctx, acc, "const", "const "); | 116 | add_keyword("let", "let "); |
112 | add_keyword(ctx, acc, "type", "type "); | ||
113 | } | 117 | } |
114 | if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { | 118 | |
115 | add_keyword(ctx, acc, "static", "static "); | 119 | if ctx.after_if() { |
116 | }; | 120 | add_keyword("else", "else {\n $0\n}"); |
117 | if (ctx.has_item_list_or_source_file_parent) || ctx.block_expr_parent { | 121 | add_keyword("else if", "else if $1 {\n $0\n}"); |
118 | add_keyword(ctx, acc, "extern", "extern "); | ||
119 | } | 122 | } |
120 | if ctx.has_item_list_or_source_file_parent | 123 | |
121 | || has_trait_or_impl_parent | 124 | if ctx.expects_ident_pat_or_ref_expr() { |
122 | || ctx.block_expr_parent | 125 | add_keyword("mut", "mut "); |
123 | || ctx.is_match_arm | ||
124 | { | ||
125 | add_keyword(ctx, acc, "unsafe", "unsafe "); | ||
126 | } | 126 | } |
127 | |||
127 | if ctx.in_loop_body { | 128 | if ctx.in_loop_body { |
128 | if ctx.can_be_stmt { | 129 | if ctx.can_be_stmt { |
129 | add_keyword(ctx, acc, "continue", "continue;"); | 130 | add_keyword("continue", "continue;"); |
130 | add_keyword(ctx, acc, "break", "break;"); | 131 | add_keyword("break", "break;"); |
131 | } else { | 132 | } else { |
132 | add_keyword(ctx, acc, "continue", "continue"); | 133 | add_keyword("continue", "continue"); |
133 | add_keyword(ctx, acc, "break", "break"); | 134 | add_keyword("break", "break"); |
134 | } | 135 | } |
135 | } | 136 | } |
136 | if ctx.has_item_list_or_source_file_parent || ctx.has_impl_parent | ctx.has_field_list_parent { | ||
137 | add_keyword(ctx, acc, "pub(crate)", "pub(crate) "); | ||
138 | add_keyword(ctx, acc, "pub", "pub "); | ||
139 | } | ||
140 | 137 | ||
141 | if !ctx.is_trivial_path { | 138 | if !ctx.is_trivial_path { |
142 | return; | 139 | return; |
143 | } | 140 | } |
144 | let fn_def = match &ctx.function_syntax { | 141 | let fn_def = match &ctx.function_def { |
145 | Some(it) => it, | 142 | Some(it) => it, |
146 | None => return, | 143 | None => return, |
147 | }; | 144 | }; |
148 | 145 | ||
149 | add_keyword( | 146 | add_keyword( |
150 | ctx, | ||
151 | acc, | ||
152 | "return", | 147 | "return", |
153 | match (ctx.can_be_stmt, fn_def.ret_type().is_some()) { | 148 | match (ctx.can_be_stmt, fn_def.ret_type().is_some()) { |
154 | (true, true) => "return $0;", | 149 | (true, true) => "return $0;", |
@@ -165,15 +160,12 @@ fn add_keyword(ctx: &CompletionContext, acc: &mut Completions, kw: &str, snippet | |||
165 | 160 | ||
166 | match ctx.config.snippet_cap { | 161 | match ctx.config.snippet_cap { |
167 | Some(cap) => { | 162 | Some(cap) => { |
168 | let tmp; | 163 | if snippet.ends_with('}') && ctx.incomplete_let { |
169 | let snippet = if snippet.ends_with('}') && ctx.incomplete_let { | ||
170 | cov_mark::hit!(let_semi); | 164 | cov_mark::hit!(let_semi); |
171 | tmp = format!("{};", snippet); | 165 | item.insert_snippet(cap, format!("{};", snippet)); |
172 | &tmp | ||
173 | } else { | 166 | } else { |
174 | snippet | 167 | item.insert_snippet(cap, snippet); |
175 | }; | 168 | } |
176 | item.insert_snippet(cap, snippet); | ||
177 | } | 169 | } |
178 | None => { | 170 | None => { |
179 | item.insert_text(if snippet.contains('$') { kw } else { snippet }); | 171 | item.insert_text(if snippet.contains('$') { kw } else { snippet }); |
@@ -236,21 +228,21 @@ mod tests { | |||
236 | check( | 228 | check( |
237 | r"m$0", | 229 | r"m$0", |
238 | expect![[r#" | 230 | expect![[r#" |
231 | kw pub(crate) | ||
232 | kw pub | ||
233 | kw unsafe | ||
239 | kw fn | 234 | kw fn |
235 | kw const | ||
236 | kw type | ||
240 | kw use | 237 | kw use |
241 | kw impl | 238 | kw impl |
242 | kw trait | 239 | kw trait |
240 | kw static | ||
241 | kw extern | ||
242 | kw mod | ||
243 | kw enum | 243 | kw enum |
244 | kw struct | 244 | kw struct |
245 | kw union | 245 | kw union |
246 | kw mod | ||
247 | kw const | ||
248 | kw type | ||
249 | kw static | ||
250 | kw extern | ||
251 | kw unsafe | ||
252 | kw pub(crate) | ||
253 | kw pub | ||
254 | "#]], | 246 | "#]], |
255 | ); | 247 | ); |
256 | } | 248 | } |
@@ -260,10 +252,16 @@ mod tests { | |||
260 | check( | 252 | check( |
261 | r"fn quux() { $0 }", | 253 | r"fn quux() { $0 }", |
262 | expect![[r#" | 254 | expect![[r#" |
255 | kw unsafe | ||
263 | kw fn | 256 | kw fn |
257 | kw const | ||
258 | kw type | ||
264 | kw use | 259 | kw use |
265 | kw impl | 260 | kw impl |
266 | kw trait | 261 | kw trait |
262 | kw static | ||
263 | kw extern | ||
264 | kw mod | ||
267 | kw match | 265 | kw match |
268 | kw while | 266 | kw while |
269 | kw while let | 267 | kw while let |
@@ -272,12 +270,6 @@ mod tests { | |||
272 | kw if let | 270 | kw if let |
273 | kw for | 271 | kw for |
274 | kw let | 272 | kw let |
275 | kw mod | ||
276 | kw const | ||
277 | kw type | ||
278 | kw static | ||
279 | kw extern | ||
280 | kw unsafe | ||
281 | kw return | 273 | kw return |
282 | "#]], | 274 | "#]], |
283 | ); | 275 | ); |
@@ -288,10 +280,16 @@ mod tests { | |||
288 | check( | 280 | check( |
289 | r"fn quux() { if true { $0 } }", | 281 | r"fn quux() { if true { $0 } }", |
290 | expect![[r#" | 282 | expect![[r#" |
283 | kw unsafe | ||
291 | kw fn | 284 | kw fn |
285 | kw const | ||
286 | kw type | ||
292 | kw use | 287 | kw use |
293 | kw impl | 288 | kw impl |
294 | kw trait | 289 | kw trait |
290 | kw static | ||
291 | kw extern | ||
292 | kw mod | ||
295 | kw match | 293 | kw match |
296 | kw while | 294 | kw while |
297 | kw while let | 295 | kw while let |
@@ -300,12 +298,6 @@ mod tests { | |||
300 | kw if let | 298 | kw if let |
301 | kw for | 299 | kw for |
302 | kw let | 300 | kw let |
303 | kw mod | ||
304 | kw const | ||
305 | kw type | ||
306 | kw static | ||
307 | kw extern | ||
308 | kw unsafe | ||
309 | kw return | 301 | kw return |
310 | "#]], | 302 | "#]], |
311 | ); | 303 | ); |
@@ -316,10 +308,16 @@ mod tests { | |||
316 | check( | 308 | check( |
317 | r#"fn quux() { if true { () } $0 }"#, | 309 | r#"fn quux() { if true { () } $0 }"#, |
318 | expect![[r#" | 310 | expect![[r#" |
311 | kw unsafe | ||
319 | kw fn | 312 | kw fn |
313 | kw const | ||
314 | kw type | ||
320 | kw use | 315 | kw use |
321 | kw impl | 316 | kw impl |
322 | kw trait | 317 | kw trait |
318 | kw static | ||
319 | kw extern | ||
320 | kw mod | ||
323 | kw match | 321 | kw match |
324 | kw while | 322 | kw while |
325 | kw while let | 323 | kw while let |
@@ -330,19 +328,15 @@ mod tests { | |||
330 | kw let | 328 | kw let |
331 | kw else | 329 | kw else |
332 | kw else if | 330 | kw else if |
333 | kw mod | ||
334 | kw const | ||
335 | kw type | ||
336 | kw static | ||
337 | kw extern | ||
338 | kw unsafe | ||
339 | kw return | 331 | kw return |
340 | "#]], | 332 | "#]], |
341 | ); | 333 | ); |
342 | check_edit( | 334 | check_edit( |
343 | "else", | 335 | "else", |
344 | r#"fn quux() { if true { () } $0 }"#, | 336 | r#"fn quux() { if true { () } $0 }"#, |
345 | r#"fn quux() { if true { () } else {$0} }"#, | 337 | r#"fn quux() { if true { () } else { |
338 | $0 | ||
339 | } }"#, | ||
346 | ); | 340 | ); |
347 | } | 341 | } |
348 | 342 | ||
@@ -355,6 +349,7 @@ fn quux() -> i32 { | |||
355 | } | 349 | } |
356 | "#, | 350 | "#, |
357 | expect![[r#" | 351 | expect![[r#" |
352 | kw unsafe | ||
358 | kw match | 353 | kw match |
359 | kw while | 354 | kw while |
360 | kw while let | 355 | kw while let |
@@ -362,7 +357,6 @@ fn quux() -> i32 { | |||
362 | kw if | 357 | kw if |
363 | kw if let | 358 | kw if let |
364 | kw for | 359 | kw for |
365 | kw unsafe | ||
366 | kw return | 360 | kw return |
367 | "#]], | 361 | "#]], |
368 | ); | 362 | ); |
@@ -373,10 +367,10 @@ fn quux() -> i32 { | |||
373 | check( | 367 | check( |
374 | r"trait My { $0 }", | 368 | r"trait My { $0 }", |
375 | expect![[r#" | 369 | expect![[r#" |
370 | kw unsafe | ||
376 | kw fn | 371 | kw fn |
377 | kw const | 372 | kw const |
378 | kw type | 373 | kw type |
379 | kw unsafe | ||
380 | "#]], | 374 | "#]], |
381 | ); | 375 | ); |
382 | } | 376 | } |
@@ -386,12 +380,27 @@ fn quux() -> i32 { | |||
386 | check( | 380 | check( |
387 | r"impl My { $0 }", | 381 | r"impl My { $0 }", |
388 | expect![[r#" | 382 | expect![[r#" |
383 | kw pub(crate) | ||
384 | kw pub | ||
385 | kw unsafe | ||
389 | kw fn | 386 | kw fn |
390 | kw const | 387 | kw const |
391 | kw type | 388 | kw type |
392 | kw unsafe | 389 | "#]], |
390 | ); | ||
391 | } | ||
392 | |||
393 | #[test] | ||
394 | fn test_keywords_in_impl_def_with_attr() { | ||
395 | check( | ||
396 | r"impl My { #[foo] $0 }", | ||
397 | expect![[r#" | ||
393 | kw pub(crate) | 398 | kw pub(crate) |
394 | kw pub | 399 | kw pub |
400 | kw unsafe | ||
401 | kw fn | ||
402 | kw const | ||
403 | kw type | ||
395 | "#]], | 404 | "#]], |
396 | ); | 405 | ); |
397 | } | 406 | } |
@@ -401,10 +410,16 @@ fn quux() -> i32 { | |||
401 | check( | 410 | check( |
402 | r"fn my() { loop { $0 } }", | 411 | r"fn my() { loop { $0 } }", |
403 | expect![[r#" | 412 | expect![[r#" |
413 | kw unsafe | ||
404 | kw fn | 414 | kw fn |
415 | kw const | ||
416 | kw type | ||
405 | kw use | 417 | kw use |
406 | kw impl | 418 | kw impl |
407 | kw trait | 419 | kw trait |
420 | kw static | ||
421 | kw extern | ||
422 | kw mod | ||
408 | kw match | 423 | kw match |
409 | kw while | 424 | kw while |
410 | kw while let | 425 | kw while let |
@@ -413,12 +428,6 @@ fn quux() -> i32 { | |||
413 | kw if let | 428 | kw if let |
414 | kw for | 429 | kw for |
415 | kw let | 430 | kw let |
416 | kw mod | ||
417 | kw const | ||
418 | kw type | ||
419 | kw static | ||
420 | kw extern | ||
421 | kw unsafe | ||
422 | kw continue | 431 | kw continue |
423 | kw break | 432 | kw break |
424 | kw return | 433 | kw return |
@@ -646,7 +655,9 @@ fn foo() { | |||
646 | fn main() { let x = $0 } | 655 | fn main() { let x = $0 } |
647 | "#, | 656 | "#, |
648 | r#" | 657 | r#" |
649 | fn main() { let x = match $0 {}; } | 658 | fn main() { let x = match $1 { |
659 | $0 | ||
660 | }; } | ||
650 | "#, | 661 | "#, |
651 | ); | 662 | ); |
652 | 663 | ||
@@ -660,7 +671,9 @@ fn main() { | |||
660 | "#, | 671 | "#, |
661 | r#" | 672 | r#" |
662 | fn main() { | 673 | fn main() { |
663 | let x = if $0 {}; | 674 | let x = if $1 { |
675 | $0 | ||
676 | }; | ||
664 | let y = 92; | 677 | let y = 92; |
665 | } | 678 | } |
666 | "#, | 679 | "#, |
@@ -676,7 +689,9 @@ fn main() { | |||
676 | "#, | 689 | "#, |
677 | r#" | 690 | r#" |
678 | fn main() { | 691 | fn main() { |
679 | let x = loop {$0}; | 692 | let x = loop { |
693 | $0 | ||
694 | }; | ||
680 | bar(); | 695 | bar(); |
681 | } | 696 | } |
682 | "#, | 697 | "#, |
diff --git a/crates/ide_completion/src/completions/lifetime.rs b/crates/ide_completion/src/completions/lifetime.rs index 5eeddf7a4..5f6285b84 100644 --- a/crates/ide_completion/src/completions/lifetime.rs +++ b/crates/ide_completion/src/completions/lifetime.rs | |||
@@ -8,19 +8,24 @@ pub(crate) fn complete_lifetime(acc: &mut Completions, ctx: &CompletionContext) | |||
8 | if !ctx.lifetime_allowed { | 8 | if !ctx.lifetime_allowed { |
9 | return; | 9 | return; |
10 | } | 10 | } |
11 | let lp_string; | ||
11 | let param_lifetime = match ( | 12 | let param_lifetime = match ( |
12 | &ctx.lifetime_syntax, | 13 | &ctx.lifetime_syntax, |
13 | ctx.lifetime_param_syntax.as_ref().and_then(|lp| lp.lifetime()), | 14 | ctx.lifetime_param_syntax.as_ref().and_then(|lp| lp.lifetime()), |
14 | ) { | 15 | ) { |
15 | (Some(lt), Some(lp)) if lp == lt.clone() => return, | 16 | (Some(lt), Some(lp)) if lp == lt.clone() => return, |
16 | (Some(_), Some(lp)) => Some(lp.to_string()), | 17 | (Some(_), Some(lp)) => { |
18 | lp_string = lp.to_string(); | ||
19 | Some(&lp_string) | ||
20 | } | ||
17 | _ => None, | 21 | _ => None, |
18 | }; | 22 | }; |
19 | 23 | ||
20 | ctx.scope.process_all_names(&mut |name, res| { | 24 | ctx.scope.process_all_names(&mut |name, res| { |
21 | if let ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) = res { | 25 | if let ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) = res { |
22 | if param_lifetime != Some(name.to_string()) { | 26 | let name = name.to_string(); |
23 | acc.add_resolution(ctx, name.to_string(), &res); | 27 | if param_lifetime != Some(&name) { |
28 | acc.add_resolution(ctx, name, &res); | ||
24 | } | 29 | } |
25 | } | 30 | } |
26 | }); | 31 | }); |
diff --git a/crates/ide_completion/src/completions/macro_in_item_position.rs b/crates/ide_completion/src/completions/macro_in_item_position.rs index 2be299ac2..ec57aee30 100644 --- a/crates/ide_completion/src/completions/macro_in_item_position.rs +++ b/crates/ide_completion/src/completions/macro_in_item_position.rs | |||
@@ -2,15 +2,22 @@ | |||
2 | 2 | ||
3 | use crate::{CompletionContext, Completions}; | 3 | use crate::{CompletionContext, Completions}; |
4 | 4 | ||
5 | // Ideally this should be removed and moved into `(un)qualified_path` respectively | ||
5 | pub(crate) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { | 6 | pub(crate) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { |
6 | // Show only macros in top level. | 7 | // Show only macros in top level. |
7 | if ctx.is_new_item { | 8 | if !ctx.is_new_item { |
8 | ctx.scope.process_all_names(&mut |name, res| { | 9 | return; |
9 | if let hir::ScopeDef::MacroDef(mac) = res { | ||
10 | acc.add_macro(ctx, Some(name.to_string()), mac); | ||
11 | } | ||
12 | }) | ||
13 | } | 10 | } |
11 | |||
12 | ctx.scope.process_all_names(&mut |name, res| { | ||
13 | if let hir::ScopeDef::MacroDef(mac) = res { | ||
14 | acc.add_macro(ctx, Some(name.to_string()), mac); | ||
15 | } | ||
16 | // FIXME: This should be done in qualified_path/unqualified_path instead? | ||
17 | if let hir::ScopeDef::ModuleDef(hir::ModuleDef::Module(_)) = res { | ||
18 | acc.add_resolution(ctx, name.to_string(), &res); | ||
19 | } | ||
20 | }) | ||
14 | } | 21 | } |
15 | 22 | ||
16 | #[cfg(test)] | 23 | #[cfg(test)] |
diff --git a/crates/ide_completion/src/completions/pattern.rs b/crates/ide_completion/src/completions/pattern.rs index 8dc9ab73c..b84e9a967 100644 --- a/crates/ide_completion/src/completions/pattern.rs +++ b/crates/ide_completion/src/completions/pattern.rs | |||
@@ -1,17 +1,15 @@ | |||
1 | //! Completes constants and paths in patterns. | 1 | //! Completes constants and paths in patterns. |
2 | 2 | ||
3 | use crate::{CompletionContext, Completions}; | 3 | use crate::{context::PatternRefutability, CompletionContext, Completions}; |
4 | 4 | ||
5 | /// Completes constants and paths in patterns. | 5 | /// Completes constants and paths in patterns. |
6 | pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | 6 | pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { |
7 | if !(ctx.is_pat_binding_or_const || ctx.is_irrefutable_pat_binding) { | 7 | let refutable = match ctx.is_pat_or_const { |
8 | return; | 8 | Some(it) => it == PatternRefutability::Refutable, |
9 | } | 9 | None => return, |
10 | if ctx.record_pat_syntax.is_some() { | 10 | }; |
11 | return; | ||
12 | } | ||
13 | 11 | ||
14 | if !ctx.is_irrefutable_pat_binding { | 12 | if refutable { |
15 | if let Some(hir::Adt::Enum(e)) = | 13 | if let Some(hir::Adt::Enum(e)) = |
16 | ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) | 14 | ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) |
17 | { | 15 | { |
@@ -31,14 +29,14 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | |||
31 | acc.add_struct_pat(ctx, *strukt, Some(name.clone())); | 29 | acc.add_struct_pat(ctx, *strukt, Some(name.clone())); |
32 | true | 30 | true |
33 | } | 31 | } |
34 | hir::ModuleDef::Variant(variant) if !ctx.is_irrefutable_pat_binding => { | 32 | hir::ModuleDef::Variant(variant) if refutable => { |
35 | acc.add_variant_pat(ctx, *variant, Some(name.clone())); | 33 | acc.add_variant_pat(ctx, *variant, Some(name.clone())); |
36 | true | 34 | true |
37 | } | 35 | } |
38 | hir::ModuleDef::Adt(hir::Adt::Enum(..)) | 36 | hir::ModuleDef::Adt(hir::Adt::Enum(..)) |
39 | | hir::ModuleDef::Variant(..) | 37 | | hir::ModuleDef::Variant(..) |
40 | | hir::ModuleDef::Const(..) | 38 | | hir::ModuleDef::Const(..) |
41 | | hir::ModuleDef::Module(..) => !ctx.is_irrefutable_pat_binding, | 39 | | hir::ModuleDef::Module(..) => refutable, |
42 | _ => false, | 40 | _ => false, |
43 | }, | 41 | }, |
44 | hir::ScopeDef::MacroDef(_) => true, | 42 | hir::ScopeDef::MacroDef(_) => true, |
@@ -47,7 +45,7 @@ pub(crate) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | |||
47 | acc.add_struct_pat(ctx, strukt, Some(name.clone())); | 45 | acc.add_struct_pat(ctx, strukt, Some(name.clone())); |
48 | true | 46 | true |
49 | } | 47 | } |
50 | Some(hir::Adt::Enum(_)) => !ctx.is_irrefutable_pat_binding, | 48 | Some(hir::Adt::Enum(_)) => refutable, |
51 | _ => true, | 49 | _ => true, |
52 | }, | 50 | }, |
53 | _ => false, | 51 | _ => false, |
@@ -402,4 +400,31 @@ impl Foo { | |||
402 | "#]], | 400 | "#]], |
403 | ) | 401 | ) |
404 | } | 402 | } |
403 | |||
404 | #[test] | ||
405 | fn completes_in_record_field_pat() { | ||
406 | check_snippet( | ||
407 | r#" | ||
408 | struct Foo { bar: Bar } | ||
409 | struct Bar(u32); | ||
410 | fn outer(Foo { bar: $0 }: Foo) {} | ||
411 | "#, | ||
412 | expect![[r#" | ||
413 | bn Foo Foo { bar$1 }$0 | ||
414 | bn Bar Bar($1)$0 | ||
415 | "#]], | ||
416 | ) | ||
417 | } | ||
418 | |||
419 | #[test] | ||
420 | fn skips_in_record_field_pat_name() { | ||
421 | check_snippet( | ||
422 | r#" | ||
423 | struct Foo { bar: Bar } | ||
424 | struct Bar(u32); | ||
425 | fn outer(Foo { bar$0 }: Foo) {} | ||
426 | "#, | ||
427 | expect![[r#""#]], | ||
428 | ) | ||
429 | } | ||
405 | } | 430 | } |
diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs index eedb44873..7a0e1ead3 100644 --- a/crates/ide_completion/src/completions/qualified_path.rs +++ b/crates/ide_completion/src/completions/qualified_path.rs | |||
@@ -7,21 +7,33 @@ use syntax::AstNode; | |||
7 | use crate::{CompletionContext, Completions}; | 7 | use crate::{CompletionContext, Completions}; |
8 | 8 | ||
9 | pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { | 9 | pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { |
10 | if ctx.is_path_disallowed() || ctx.expects_item() { | ||
11 | return; | ||
12 | } | ||
10 | let path = match &ctx.path_qual { | 13 | let path = match &ctx.path_qual { |
11 | Some(path) => path.clone(), | 14 | Some(path) => path.clone(), |
12 | None => return, | 15 | None => return, |
13 | }; | 16 | }; |
14 | 17 | ||
15 | if ctx.attribute_under_caret.is_some() || ctx.mod_declaration_under_caret.is_some() { | ||
16 | return; | ||
17 | } | ||
18 | |||
19 | let context_module = ctx.scope.module(); | ||
20 | |||
21 | let resolution = match ctx.sema.resolve_path(&path) { | 18 | let resolution = match ctx.sema.resolve_path(&path) { |
22 | Some(res) => res, | 19 | Some(res) => res, |
23 | None => return, | 20 | None => return, |
24 | }; | 21 | }; |
22 | let context_module = ctx.scope.module(); | ||
23 | if ctx.expects_assoc_item() { | ||
24 | if let PathResolution::Def(hir::ModuleDef::Module(module)) = resolution { | ||
25 | let module_scope = module.scope(ctx.db, context_module); | ||
26 | for (name, def) in module_scope { | ||
27 | if let ScopeDef::MacroDef(macro_def) = def { | ||
28 | acc.add_macro(ctx, Some(name.to_string()), macro_def); | ||
29 | } | ||
30 | if let ScopeDef::ModuleDef(hir::ModuleDef::Module(_)) = def { | ||
31 | acc.add_resolution(ctx, name.to_string(), &def); | ||
32 | } | ||
33 | } | ||
34 | } | ||
35 | return; | ||
36 | } | ||
25 | 37 | ||
26 | // Add associated types on type parameters and `Self`. | 38 | // Add associated types on type parameters and `Self`. |
27 | resolution.assoc_type_shorthand_candidates(ctx.db, |_, alias| { | 39 | resolution.assoc_type_shorthand_candidates(ctx.db, |_, alias| { |
@@ -596,7 +608,7 @@ fn main() { T::$0; } | |||
596 | macro_rules! foo { () => {} } | 608 | macro_rules! foo { () => {} } |
597 | 609 | ||
598 | fn main() { let _ = crate::$0 } | 610 | fn main() { let _ = crate::$0 } |
599 | "#, | 611 | "#, |
600 | expect![[r##" | 612 | expect![[r##" |
601 | fn main() fn() | 613 | fn main() fn() |
602 | ma foo!(…) #[macro_export] macro_rules! foo | 614 | ma foo!(…) #[macro_export] macro_rules! foo |
@@ -605,6 +617,44 @@ fn main() { let _ = crate::$0 } | |||
605 | } | 617 | } |
606 | 618 | ||
607 | #[test] | 619 | #[test] |
620 | fn completes_in_assoc_item_list() { | ||
621 | check( | ||
622 | r#" | ||
623 | #[macro_export] | ||
624 | macro_rules! foo { () => {} } | ||
625 | mod bar {} | ||
626 | |||
627 | struct MyStruct {} | ||
628 | impl MyStruct { | ||
629 | crate::$0 | ||
630 | } | ||
631 | "#, | ||
632 | expect![[r##" | ||
633 | md bar | ||
634 | ma foo! #[macro_export] macro_rules! foo | ||
635 | "##]], | ||
636 | ); | ||
637 | } | ||
638 | |||
639 | #[test] | ||
640 | #[ignore] // FIXME doesn't complete anything atm | ||
641 | fn completes_in_item_list() { | ||
642 | check( | ||
643 | r#" | ||
644 | struct MyStruct {} | ||
645 | macro_rules! foo {} | ||
646 | mod bar {} | ||
647 | |||
648 | crate::$0 | ||
649 | "#, | ||
650 | expect![[r#" | ||
651 | md bar | ||
652 | ma foo! macro_rules! foo | ||
653 | "#]], | ||
654 | ) | ||
655 | } | ||
656 | |||
657 | #[test] | ||
608 | fn test_super_super_completion() { | 658 | fn test_super_super_completion() { |
609 | check( | 659 | check( |
610 | r#" | 660 | r#" |
diff --git a/crates/ide_completion/src/completions/record.rs b/crates/ide_completion/src/completions/record.rs index 2f95b8687..e1526b70b 100644 --- a/crates/ide_completion/src/completions/record.rs +++ b/crates/ide_completion/src/completions/record.rs | |||
@@ -13,20 +13,19 @@ pub(crate) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> | |||
13 | let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_lit.clone())); | 13 | let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_lit.clone())); |
14 | let default_trait = FamousDefs(&ctx.sema, ctx.krate).core_default_Default(); | 14 | let default_trait = FamousDefs(&ctx.sema, ctx.krate).core_default_Default(); |
15 | let impl_default_trait = default_trait | 15 | let impl_default_trait = default_trait |
16 | .and_then(|default_trait| ty.map(|ty| ty.impls_trait(ctx.db, default_trait, &[]))) | 16 | .zip(ty) |
17 | .unwrap_or(false); | 17 | .map_or(false, |(default_trait, ty)| ty.impls_trait(ctx.db, default_trait, &[])); |
18 | 18 | ||
19 | let missing_fields = ctx.sema.record_literal_missing_fields(record_lit); | 19 | let missing_fields = ctx.sema.record_literal_missing_fields(record_lit); |
20 | if impl_default_trait && !missing_fields.is_empty() { | 20 | if impl_default_trait && !missing_fields.is_empty() { |
21 | let completion_text = "..Default::default()"; | 21 | let completion_text = "..Default::default()"; |
22 | let completion_text = completion_text | ||
23 | .strip_prefix(ctx.token.to_string().as_str()) | ||
24 | .unwrap_or(completion_text); | ||
25 | let mut item = CompletionItem::new( | 22 | let mut item = CompletionItem::new( |
26 | CompletionKind::Snippet, | 23 | CompletionKind::Snippet, |
27 | ctx.source_range(), | 24 | ctx.source_range(), |
28 | "..Default::default()", | 25 | completion_text, |
29 | ); | 26 | ); |
27 | let completion_text = | ||
28 | completion_text.strip_prefix(ctx.token.text()).unwrap_or(completion_text); | ||
30 | item.insert_text(completion_text).kind(SymbolKind::Field); | 29 | item.insert_text(completion_text).kind(SymbolKind::Field); |
31 | item.add_to(acc); | 30 | item.add_to(acc); |
32 | } | 31 | } |
@@ -108,8 +107,6 @@ fn process(f: S) { | |||
108 | check_snippet( | 107 | check_snippet( |
109 | test_code, | 108 | test_code, |
110 | expect![[r#" | 109 | expect![[r#" |
111 | sn pd | ||
112 | sn ppd | ||
113 | fd ..Default::default() | 110 | fd ..Default::default() |
114 | "#]], | 111 | "#]], |
115 | ); | 112 | ); |
@@ -179,13 +176,7 @@ fn process(f: S) { | |||
179 | "#]], | 176 | "#]], |
180 | ); | 177 | ); |
181 | 178 | ||
182 | check_snippet( | 179 | check_snippet(test_code, expect![[r#""#]]); |
183 | test_code, | ||
184 | expect![[r#" | ||
185 | sn pd | ||
186 | sn ppd | ||
187 | "#]], | ||
188 | ); | ||
189 | } | 180 | } |
190 | 181 | ||
191 | #[test] | 182 | #[test] |
diff --git a/crates/ide_completion/src/completions/snippet.rs b/crates/ide_completion/src/completions/snippet.rs index 7f7830976..defc25b00 100644 --- a/crates/ide_completion/src/completions/snippet.rs +++ b/crates/ide_completion/src/completions/snippet.rs | |||
@@ -14,7 +14,7 @@ fn snippet(ctx: &CompletionContext, cap: SnippetCap, label: &str, snippet: &str) | |||
14 | } | 14 | } |
15 | 15 | ||
16 | pub(crate) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { | 16 | pub(crate) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionContext) { |
17 | if !(ctx.is_trivial_path && ctx.function_syntax.is_some()) { | 17 | if !(ctx.is_trivial_path && ctx.function_def.is_some()) { |
18 | return; | 18 | return; |
19 | } | 19 | } |
20 | let cap = match ctx.config.snippet_cap { | 20 | let cap = match ctx.config.snippet_cap { |
@@ -22,8 +22,10 @@ pub(crate) fn complete_expr_snippet(acc: &mut Completions, ctx: &CompletionConte | |||
22 | None => return, | 22 | None => return, |
23 | }; | 23 | }; |
24 | 24 | ||
25 | snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); | 25 | if ctx.can_be_stmt { |
26 | snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); | 26 | snippet(ctx, cap, "pd", "eprintln!(\"$0 = {:?}\", $0);").add_to(acc); |
27 | snippet(ctx, cap, "ppd", "eprintln!(\"$0 = {:#?}\", $0);").add_to(acc); | ||
28 | } | ||
27 | } | 29 | } |
28 | 30 | ||
29 | pub(crate) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { | 31 | pub(crate) fn complete_item_snippet(acc: &mut Completions, ctx: &CompletionContext) { |
diff --git a/crates/ide_completion/src/completions/unqualified_path.rs b/crates/ide_completion/src/completions/unqualified_path.rs index 7875500c1..ede07f605 100644 --- a/crates/ide_completion/src/completions/unqualified_path.rs +++ b/crates/ide_completion/src/completions/unqualified_path.rs | |||
@@ -1,7 +1,6 @@ | |||
1 | //! Completion of names from the current scope, e.g. locals and imported items. | 1 | //! Completion of names from the current scope, e.g. locals and imported items. |
2 | 2 | ||
3 | use hir::ScopeDef; | 3 | use hir::ScopeDef; |
4 | use syntax::AstNode; | ||
5 | 4 | ||
6 | use crate::{CompletionContext, Completions}; | 5 | use crate::{CompletionContext, Completions}; |
7 | 6 | ||
@@ -9,14 +8,30 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC | |||
9 | if !ctx.is_trivial_path { | 8 | if !ctx.is_trivial_path { |
10 | return; | 9 | return; |
11 | } | 10 | } |
12 | if ctx.record_lit_syntax.is_some() | 11 | if ctx.is_path_disallowed() || ctx.expects_item() { |
13 | || ctx.record_pat_syntax.is_some() | 12 | return; |
14 | || ctx.attribute_under_caret.is_some() | 13 | } |
15 | || ctx.mod_declaration_under_caret.is_some() | 14 | if ctx.expects_assoc_item() { |
16 | { | 15 | ctx.scope.process_all_names(&mut |name, def| { |
16 | if let ScopeDef::MacroDef(macro_def) = def { | ||
17 | acc.add_macro(ctx, Some(name.to_string()), macro_def); | ||
18 | } | ||
19 | if let ScopeDef::ModuleDef(hir::ModuleDef::Module(_)) = def { | ||
20 | acc.add_resolution(ctx, name.to_string(), &def); | ||
21 | } | ||
22 | }); | ||
17 | return; | 23 | return; |
18 | } | 24 | } |
19 | 25 | ||
26 | if ctx.expects_use_tree() { | ||
27 | cov_mark::hit!(only_completes_modules_in_import); | ||
28 | ctx.scope.process_all_names(&mut |name, res| { | ||
29 | if let ScopeDef::ModuleDef(hir::ModuleDef::Module(_)) = res { | ||
30 | acc.add_resolution(ctx, name.to_string(), &res); | ||
31 | } | ||
32 | }); | ||
33 | return; | ||
34 | } | ||
20 | if let Some(hir::Adt::Enum(e)) = | 35 | if let Some(hir::Adt::Enum(e)) = |
21 | ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) | 36 | ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) |
22 | { | 37 | { |
@@ -30,14 +45,6 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC | |||
30 | cov_mark::hit!(skip_lifetime_completion); | 45 | cov_mark::hit!(skip_lifetime_completion); |
31 | return; | 46 | return; |
32 | } | 47 | } |
33 | if ctx.use_item_syntax.is_some() { | ||
34 | if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) { | ||
35 | if name_ref.syntax().text() == name.to_string().as_str() { | ||
36 | cov_mark::hit!(self_fulfilling_completion); | ||
37 | return; | ||
38 | } | ||
39 | } | ||
40 | } | ||
41 | acc.add_resolution(ctx, name.to_string(), &res); | 48 | acc.add_resolution(ctx, name.to_string(), &res); |
42 | }); | 49 | }); |
43 | } | 50 | } |
@@ -61,15 +68,17 @@ mod tests { | |||
61 | } | 68 | } |
62 | 69 | ||
63 | #[test] | 70 | #[test] |
64 | fn self_fulfilling_completion() { | 71 | fn only_completes_modules_in_import() { |
65 | cov_mark::check!(self_fulfilling_completion); | 72 | cov_mark::check!(only_completes_modules_in_import); |
66 | check( | 73 | check( |
67 | r#" | 74 | r#" |
68 | use foo$0 | 75 | use f$0 |
69 | use std::collections; | 76 | |
77 | struct Foo; | ||
78 | mod foo {} | ||
70 | "#, | 79 | "#, |
71 | expect![[r#" | 80 | expect![[r#" |
72 | ?? collections | 81 | md foo |
73 | "#]], | 82 | "#]], |
74 | ); | 83 | ); |
75 | } | 84 | } |
@@ -86,7 +95,7 @@ fn quux(x: Option<Enum>) { | |||
86 | } | 95 | } |
87 | } | 96 | } |
88 | "#, | 97 | "#, |
89 | expect![[""]], | 98 | expect![[r#""#]], |
90 | ); | 99 | ); |
91 | } | 100 | } |
92 | 101 | ||
@@ -102,7 +111,7 @@ fn quux(x: Option<Enum>) { | |||
102 | } | 111 | } |
103 | } | 112 | } |
104 | "#, | 113 | "#, |
105 | expect![[""]], | 114 | expect![[r#""#]], |
106 | ); | 115 | ); |
107 | } | 116 | } |
108 | 117 | ||
@@ -651,7 +660,7 @@ fn f() {} | |||
651 | } | 660 | } |
652 | 661 | ||
653 | #[test] | 662 | #[test] |
654 | fn completes_type_or_trait_in_impl_block() { | 663 | fn completes_target_type_or_trait_in_impl_block() { |
655 | check( | 664 | check( |
656 | r#" | 665 | r#" |
657 | trait MyTrait {} | 666 | trait MyTrait {} |
@@ -666,4 +675,41 @@ impl My$0 | |||
666 | "#]], | 675 | "#]], |
667 | ) | 676 | ) |
668 | } | 677 | } |
678 | |||
679 | #[test] | ||
680 | fn completes_in_assoc_item_list() { | ||
681 | check( | ||
682 | r#" | ||
683 | macro_rules! foo {} | ||
684 | mod bar {} | ||
685 | |||
686 | struct MyStruct {} | ||
687 | impl MyStruct { | ||
688 | $0 | ||
689 | } | ||
690 | "#, | ||
691 | expect![[r#" | ||
692 | md bar | ||
693 | ma foo! macro_rules! foo | ||
694 | "#]], | ||
695 | ) | ||
696 | } | ||
697 | |||
698 | // FIXME: The completions here currently come from `macro_in_item_position`, but they shouldn't | ||
699 | #[test] | ||
700 | fn completes_in_item_list() { | ||
701 | check( | ||
702 | r#" | ||
703 | struct MyStruct {} | ||
704 | macro_rules! foo {} | ||
705 | mod bar {} | ||
706 | |||
707 | $0 | ||
708 | "#, | ||
709 | expect![[r#" | ||
710 | md bar | ||
711 | ma foo!(…) macro_rules! foo | ||
712 | "#]], | ||
713 | ) | ||
714 | } | ||
669 | } | 715 | } |
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs index 1ec59ff80..8d6440cb2 100644 --- a/crates/ide_completion/src/context.rs +++ b/crates/ide_completion/src/context.rs | |||
@@ -1,29 +1,34 @@ | |||
1 | //! See `CompletionContext` structure. | 1 | //! See `CompletionContext` structure. |
2 | 2 | ||
3 | use hir::{Local, ScopeDef, Semantics, SemanticsScope, Type}; | 3 | use hir::{Local, ScopeDef, Semantics, SemanticsScope, Type}; |
4 | use ide_db::base_db::{FilePosition, SourceDatabase}; | 4 | use ide_db::{ |
5 | use ide_db::{call_info::ActiveParameter, RootDatabase}; | 5 | base_db::{FilePosition, SourceDatabase}, |
6 | call_info::ActiveParameter, | ||
7 | RootDatabase, | ||
8 | }; | ||
6 | use syntax::{ | 9 | use syntax::{ |
7 | algo::find_node_at_offset, | 10 | algo::find_node_at_offset, |
8 | ast::{self, NameOrNameRef, NameOwner}, | 11 | ast::{self, NameOrNameRef, NameOwner}, |
9 | match_ast, AstNode, NodeOrToken, | 12 | match_ast, AstNode, NodeOrToken, |
10 | SyntaxKind::*, | 13 | SyntaxKind::{self, *}, |
11 | SyntaxNode, SyntaxToken, TextRange, TextSize, | 14 | SyntaxNode, SyntaxToken, TextRange, TextSize, T, |
12 | }; | 15 | }; |
13 | |||
14 | use text_edit::Indel; | 16 | use text_edit::Indel; |
15 | 17 | ||
16 | use crate::{ | 18 | use crate::{ |
17 | patterns::{ | 19 | patterns::{ |
18 | fn_is_prev, for_is_prev2, has_bind_pat_parent, has_block_expr_parent, | 20 | determine_location, determine_prev_sibling, for_is_prev2, inside_impl_trait_block, |
19 | has_field_list_parent, has_impl_as_prev_sibling, has_impl_parent, | 21 | is_in_loop_body, is_match_arm, previous_token, ImmediateLocation, ImmediatePrevSibling, |
20 | has_item_list_or_source_file_parent, has_ref_parent, has_trait_as_prev_sibling, | ||
21 | has_trait_parent, if_is_prev, inside_impl_trait_block, is_in_loop_body, is_match_arm, | ||
22 | unsafe_is_prev, | ||
23 | }, | 22 | }, |
24 | CompletionConfig, | 23 | CompletionConfig, |
25 | }; | 24 | }; |
26 | 25 | ||
26 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | ||
27 | pub(crate) enum PatternRefutability { | ||
28 | Refutable, | ||
29 | Irrefutable, | ||
30 | } | ||
31 | |||
27 | /// `CompletionContext` is created early during completion to figure out, where | 32 | /// `CompletionContext` is created early during completion to figure out, where |
28 | /// exactly is the cursor, syntax-wise. | 33 | /// exactly is the cursor, syntax-wise. |
29 | #[derive(Debug)] | 34 | #[derive(Debug)] |
@@ -41,28 +46,38 @@ pub(crate) struct CompletionContext<'a> { | |||
41 | pub(super) expected_name: Option<NameOrNameRef>, | 46 | pub(super) expected_name: Option<NameOrNameRef>, |
42 | pub(super) expected_type: Option<Type>, | 47 | pub(super) expected_type: Option<Type>, |
43 | pub(super) name_ref_syntax: Option<ast::NameRef>, | 48 | pub(super) name_ref_syntax: Option<ast::NameRef>, |
44 | pub(super) lifetime_syntax: Option<ast::Lifetime>, | 49 | |
45 | pub(super) lifetime_param_syntax: Option<ast::LifetimeParam>, | ||
46 | pub(super) function_syntax: Option<ast::Fn>, | ||
47 | pub(super) use_item_syntax: Option<ast::Use>, | 50 | pub(super) use_item_syntax: Option<ast::Use>, |
51 | |||
52 | /// The parent function of the cursor position if it exists. | ||
53 | pub(super) function_def: Option<ast::Fn>, | ||
54 | /// The parent impl of the cursor position if it exists. | ||
55 | pub(super) impl_def: Option<ast::Impl>, | ||
56 | |||
57 | /// RecordExpr the token is a field of | ||
48 | pub(super) record_lit_syntax: Option<ast::RecordExpr>, | 58 | pub(super) record_lit_syntax: Option<ast::RecordExpr>, |
59 | /// RecordPat the token is a field of | ||
49 | pub(super) record_pat_syntax: Option<ast::RecordPat>, | 60 | pub(super) record_pat_syntax: Option<ast::RecordPat>, |
50 | pub(super) record_field_syntax: Option<ast::RecordExprField>, | 61 | |
51 | pub(super) impl_def: Option<ast::Impl>, | 62 | // potentially set if we are completing a lifetime |
63 | pub(super) lifetime_syntax: Option<ast::Lifetime>, | ||
64 | pub(super) lifetime_param_syntax: Option<ast::LifetimeParam>, | ||
52 | pub(super) lifetime_allowed: bool, | 65 | pub(super) lifetime_allowed: bool, |
66 | pub(super) is_label_ref: bool, | ||
67 | |||
68 | // potentially set if we are completing a name | ||
69 | pub(super) is_pat_or_const: Option<PatternRefutability>, | ||
70 | pub(super) is_param: bool, | ||
71 | |||
72 | pub(super) completion_location: Option<ImmediateLocation>, | ||
73 | pub(super) prev_sibling: Option<ImmediatePrevSibling>, | ||
74 | |||
53 | /// FIXME: `ActiveParameter` is string-based, which is very very wrong | 75 | /// FIXME: `ActiveParameter` is string-based, which is very very wrong |
54 | pub(super) active_parameter: Option<ActiveParameter>, | 76 | pub(super) active_parameter: Option<ActiveParameter>, |
55 | pub(super) is_param: bool, | ||
56 | pub(super) is_label_ref: bool, | ||
57 | /// If a name-binding or reference to a const in a pattern. | ||
58 | /// Irrefutable patterns (like let) are excluded. | ||
59 | pub(super) is_pat_binding_or_const: bool, | ||
60 | pub(super) is_irrefutable_pat_binding: bool, | ||
61 | /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path. | 77 | /// A single-indent path, like `foo`. `::foo` should not be considered a trivial path. |
62 | pub(super) is_trivial_path: bool, | 78 | pub(super) is_trivial_path: bool, |
63 | /// If not a trivial path, the prefix (qualifier). | 79 | /// If not a trivial path, the prefix (qualifier). |
64 | pub(super) path_qual: Option<ast::Path>, | 80 | pub(super) path_qual: Option<ast::Path>, |
65 | pub(super) after_if: bool, | ||
66 | /// `true` if we are a statement or a last expr in the block. | 81 | /// `true` if we are a statement or a last expr in the block. |
67 | pub(super) can_be_stmt: bool, | 82 | pub(super) can_be_stmt: bool, |
68 | /// `true` if we expect an expression at the cursor position. | 83 | /// `true` if we expect an expression at the cursor position. |
@@ -82,24 +97,15 @@ pub(crate) struct CompletionContext<'a> { | |||
82 | pub(super) has_type_args: bool, | 97 | pub(super) has_type_args: bool, |
83 | pub(super) attribute_under_caret: Option<ast::Attr>, | 98 | pub(super) attribute_under_caret: Option<ast::Attr>, |
84 | pub(super) mod_declaration_under_caret: Option<ast::Module>, | 99 | pub(super) mod_declaration_under_caret: Option<ast::Module>, |
85 | pub(super) unsafe_is_prev: bool, | 100 | pub(super) locals: Vec<(String, Local)>, |
86 | pub(super) if_is_prev: bool, | 101 | |
87 | pub(super) block_expr_parent: bool, | 102 | // keyword patterns |
88 | pub(super) bind_pat_parent: bool, | 103 | pub(super) previous_token: Option<SyntaxToken>, |
89 | pub(super) ref_pat_parent: bool, | ||
90 | pub(super) in_loop_body: bool, | 104 | pub(super) in_loop_body: bool, |
91 | pub(super) has_trait_parent: bool, | ||
92 | pub(super) has_impl_parent: bool, | ||
93 | pub(super) inside_impl_trait_block: bool, | ||
94 | pub(super) has_field_list_parent: bool, | ||
95 | pub(super) trait_as_prev_sibling: bool, | ||
96 | pub(super) impl_as_prev_sibling: bool, | ||
97 | pub(super) is_match_arm: bool, | 105 | pub(super) is_match_arm: bool, |
98 | pub(super) has_item_list_or_source_file_parent: bool, | ||
99 | pub(super) for_is_prev2: bool, | ||
100 | pub(super) fn_is_prev: bool, | ||
101 | pub(super) incomplete_let: bool, | 106 | pub(super) incomplete_let: bool, |
102 | pub(super) locals: Vec<(String, Local)>, | 107 | |
108 | no_completion_required: bool, | ||
103 | } | 109 | } |
104 | 110 | ||
105 | impl<'a> CompletionContext<'a> { | 111 | impl<'a> CompletionContext<'a> { |
@@ -149,20 +155,17 @@ impl<'a> CompletionContext<'a> { | |||
149 | name_ref_syntax: None, | 155 | name_ref_syntax: None, |
150 | lifetime_syntax: None, | 156 | lifetime_syntax: None, |
151 | lifetime_param_syntax: None, | 157 | lifetime_param_syntax: None, |
152 | function_syntax: None, | 158 | function_def: None, |
153 | use_item_syntax: None, | 159 | use_item_syntax: None, |
154 | record_lit_syntax: None, | 160 | record_lit_syntax: None, |
155 | record_pat_syntax: None, | 161 | record_pat_syntax: None, |
156 | record_field_syntax: None, | ||
157 | impl_def: None, | 162 | impl_def: None, |
158 | active_parameter: ActiveParameter::at(db, position), | 163 | active_parameter: ActiveParameter::at(db, position), |
159 | is_label_ref: false, | 164 | is_label_ref: false, |
160 | is_param: false, | 165 | is_param: false, |
161 | is_pat_binding_or_const: false, | 166 | is_pat_or_const: None, |
162 | is_irrefutable_pat_binding: false, | ||
163 | is_trivial_path: false, | 167 | is_trivial_path: false, |
164 | path_qual: None, | 168 | path_qual: None, |
165 | after_if: false, | ||
166 | can_be_stmt: false, | 169 | can_be_stmt: false, |
167 | is_expr: false, | 170 | is_expr: false, |
168 | is_new_item: false, | 171 | is_new_item: false, |
@@ -175,67 +178,57 @@ impl<'a> CompletionContext<'a> { | |||
175 | has_type_args: false, | 178 | has_type_args: false, |
176 | attribute_under_caret: None, | 179 | attribute_under_caret: None, |
177 | mod_declaration_under_caret: None, | 180 | mod_declaration_under_caret: None, |
178 | unsafe_is_prev: false, | 181 | previous_token: None, |
179 | if_is_prev: false, | ||
180 | block_expr_parent: false, | ||
181 | bind_pat_parent: false, | ||
182 | ref_pat_parent: false, | ||
183 | in_loop_body: false, | 182 | in_loop_body: false, |
184 | has_trait_parent: false, | 183 | completion_location: None, |
185 | has_impl_parent: false, | 184 | prev_sibling: None, |
186 | inside_impl_trait_block: false, | ||
187 | has_field_list_parent: false, | ||
188 | trait_as_prev_sibling: false, | ||
189 | impl_as_prev_sibling: false, | ||
190 | is_match_arm: false, | 185 | is_match_arm: false, |
191 | has_item_list_or_source_file_parent: false, | 186 | no_completion_required: false, |
192 | for_is_prev2: false, | ||
193 | fn_is_prev: false, | ||
194 | incomplete_let: false, | 187 | incomplete_let: false, |
195 | locals, | 188 | locals, |
196 | }; | 189 | }; |
197 | 190 | ||
198 | let mut original_file = original_file.syntax().clone(); | 191 | let mut original_file = original_file.syntax().clone(); |
199 | let mut hypothetical_file = file_with_fake_ident.syntax().clone(); | 192 | let mut speculative_file = file_with_fake_ident.syntax().clone(); |
200 | let mut offset = position.offset; | 193 | let mut offset = position.offset; |
201 | let mut fake_ident_token = fake_ident_token; | 194 | let mut fake_ident_token = fake_ident_token; |
202 | 195 | ||
203 | // Are we inside a macro call? | 196 | // Are we inside a macro call? |
204 | while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( | 197 | while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( |
205 | find_node_at_offset::<ast::MacroCall>(&original_file, offset), | 198 | find_node_at_offset::<ast::MacroCall>(&original_file, offset), |
206 | find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset), | 199 | find_node_at_offset::<ast::MacroCall>(&speculative_file, offset), |
207 | ) { | 200 | ) { |
208 | if actual_macro_call.path().as_ref().map(|s| s.syntax().text()) | 201 | if actual_macro_call.path().as_ref().map(|s| s.syntax().text()) |
209 | != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()) | 202 | != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()) |
210 | { | 203 | { |
211 | break; | 204 | break; |
212 | } | 205 | } |
213 | let hypothetical_args = match macro_call_with_fake_ident.token_tree() { | 206 | let speculative_args = match macro_call_with_fake_ident.token_tree() { |
214 | Some(tt) => tt, | 207 | Some(tt) => tt, |
215 | None => break, | 208 | None => break, |
216 | }; | 209 | }; |
217 | if let (Some(actual_expansion), Some(hypothetical_expansion)) = ( | 210 | if let (Some(actual_expansion), Some(speculative_expansion)) = ( |
218 | ctx.sema.expand(&actual_macro_call), | 211 | ctx.sema.expand(&actual_macro_call), |
219 | ctx.sema.speculative_expand( | 212 | ctx.sema.speculative_expand( |
220 | &actual_macro_call, | 213 | &actual_macro_call, |
221 | &hypothetical_args, | 214 | &speculative_args, |
222 | fake_ident_token, | 215 | fake_ident_token, |
223 | ), | 216 | ), |
224 | ) { | 217 | ) { |
225 | let new_offset = hypothetical_expansion.1.text_range().start(); | 218 | let new_offset = speculative_expansion.1.text_range().start(); |
226 | if new_offset > actual_expansion.text_range().end() { | 219 | if new_offset > actual_expansion.text_range().end() { |
227 | break; | 220 | break; |
228 | } | 221 | } |
229 | original_file = actual_expansion; | 222 | original_file = actual_expansion; |
230 | hypothetical_file = hypothetical_expansion.0; | 223 | speculative_file = speculative_expansion.0; |
231 | fake_ident_token = hypothetical_expansion.1; | 224 | fake_ident_token = speculative_expansion.1; |
232 | offset = new_offset; | 225 | offset = new_offset; |
233 | } else { | 226 | } else { |
234 | break; | 227 | break; |
235 | } | 228 | } |
236 | } | 229 | } |
237 | ctx.fill_keyword_patterns(&hypothetical_file, offset); | 230 | ctx.fill_keyword_patterns(&speculative_file, offset); |
238 | ctx.fill(&original_file, hypothetical_file, offset); | 231 | ctx.fill(&original_file, speculative_file, offset); |
239 | Some(ctx) | 232 | Some(ctx) |
240 | } | 233 | } |
241 | 234 | ||
@@ -245,7 +238,7 @@ impl<'a> CompletionContext<'a> { | |||
245 | /// Exception for this case is `impl Trait for Foo`, where we would like to hint trait method names. | 238 | /// Exception for this case is `impl Trait for Foo`, where we would like to hint trait method names. |
246 | /// - `for _ i$0` -- obviously, it'll be "in" keyword. | 239 | /// - `for _ i$0` -- obviously, it'll be "in" keyword. |
247 | pub(crate) fn no_completion_required(&self) -> bool { | 240 | pub(crate) fn no_completion_required(&self) -> bool { |
248 | (self.fn_is_prev && !self.inside_impl_trait_block) || self.for_is_prev2 | 241 | self.no_completion_required |
249 | } | 242 | } |
250 | 243 | ||
251 | /// The range of the identifier that is being completed. | 244 | /// The range of the identifier that is being completed. |
@@ -264,33 +257,85 @@ impl<'a> CompletionContext<'a> { | |||
264 | } | 257 | } |
265 | } | 258 | } |
266 | 259 | ||
260 | pub(crate) fn previous_token_is(&self, kind: SyntaxKind) -> bool { | ||
261 | self.previous_token.as_ref().map_or(false, |tok| tok.kind() == kind) | ||
262 | } | ||
263 | |||
264 | pub(crate) fn expects_assoc_item(&self) -> bool { | ||
265 | matches!( | ||
266 | self.completion_location, | ||
267 | Some(ImmediateLocation::Trait) | Some(ImmediateLocation::Impl) | ||
268 | ) | ||
269 | } | ||
270 | |||
271 | pub(crate) fn expects_use_tree(&self) -> bool { | ||
272 | matches!(self.completion_location, Some(ImmediateLocation::Use)) | ||
273 | } | ||
274 | |||
275 | pub(crate) fn expects_non_trait_assoc_item(&self) -> bool { | ||
276 | matches!(self.completion_location, Some(ImmediateLocation::Impl)) | ||
277 | } | ||
278 | |||
279 | pub(crate) fn expects_item(&self) -> bool { | ||
280 | matches!(self.completion_location, Some(ImmediateLocation::ItemList)) | ||
281 | } | ||
282 | |||
283 | pub(crate) fn expects_expression(&self) -> bool { | ||
284 | self.is_expr | ||
285 | } | ||
286 | |||
287 | pub(crate) fn has_block_expr_parent(&self) -> bool { | ||
288 | matches!(self.completion_location, Some(ImmediateLocation::BlockExpr)) | ||
289 | } | ||
290 | |||
291 | pub(crate) fn expects_ident_pat_or_ref_expr(&self) -> bool { | ||
292 | matches!( | ||
293 | self.completion_location, | ||
294 | Some(ImmediateLocation::IdentPat) | Some(ImmediateLocation::RefExpr) | ||
295 | ) | ||
296 | } | ||
297 | |||
298 | pub(crate) fn expect_record_field(&self) -> bool { | ||
299 | matches!(self.completion_location, Some(ImmediateLocation::RecordField)) | ||
300 | } | ||
301 | |||
302 | pub(crate) fn has_impl_or_trait_prev_sibling(&self) -> bool { | ||
303 | matches!( | ||
304 | self.prev_sibling, | ||
305 | Some(ImmediatePrevSibling::ImplDefType) | Some(ImmediatePrevSibling::TraitDefName) | ||
306 | ) | ||
307 | } | ||
308 | |||
309 | pub(crate) fn after_if(&self) -> bool { | ||
310 | matches!(self.prev_sibling, Some(ImmediatePrevSibling::IfExpr)) | ||
311 | } | ||
312 | |||
313 | pub(crate) fn is_path_disallowed(&self) -> bool { | ||
314 | self.record_lit_syntax.is_some() | ||
315 | || self.record_pat_syntax.is_some() | ||
316 | || self.attribute_under_caret.is_some() | ||
317 | || self.mod_declaration_under_caret.is_some() | ||
318 | } | ||
319 | |||
267 | fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) { | 320 | fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) { |
268 | let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); | 321 | let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); |
269 | let syntax_element = NodeOrToken::Token(fake_ident_token); | 322 | let syntax_element = NodeOrToken::Token(fake_ident_token); |
270 | self.block_expr_parent = has_block_expr_parent(syntax_element.clone()); | 323 | self.previous_token = previous_token(syntax_element.clone()); |
271 | self.unsafe_is_prev = unsafe_is_prev(syntax_element.clone()); | ||
272 | self.if_is_prev = if_is_prev(syntax_element.clone()); | ||
273 | self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone()); | ||
274 | self.ref_pat_parent = has_ref_parent(syntax_element.clone()); | ||
275 | self.in_loop_body = is_in_loop_body(syntax_element.clone()); | 324 | self.in_loop_body = is_in_loop_body(syntax_element.clone()); |
276 | self.has_trait_parent = has_trait_parent(syntax_element.clone()); | ||
277 | self.has_impl_parent = has_impl_parent(syntax_element.clone()); | ||
278 | self.inside_impl_trait_block = inside_impl_trait_block(syntax_element.clone()); | ||
279 | self.has_field_list_parent = has_field_list_parent(syntax_element.clone()); | ||
280 | self.impl_as_prev_sibling = has_impl_as_prev_sibling(syntax_element.clone()); | ||
281 | self.trait_as_prev_sibling = has_trait_as_prev_sibling(syntax_element.clone()); | ||
282 | self.is_match_arm = is_match_arm(syntax_element.clone()); | 325 | self.is_match_arm = is_match_arm(syntax_element.clone()); |
283 | self.has_item_list_or_source_file_parent = | 326 | |
284 | has_item_list_or_source_file_parent(syntax_element.clone()); | ||
285 | self.mod_declaration_under_caret = | 327 | self.mod_declaration_under_caret = |
286 | find_node_at_offset::<ast::Module>(&file_with_fake_ident, offset) | 328 | find_node_at_offset::<ast::Module>(&file_with_fake_ident, offset) |
287 | .filter(|module| module.item_list().is_none()); | 329 | .filter(|module| module.item_list().is_none()); |
288 | self.for_is_prev2 = for_is_prev2(syntax_element.clone()); | ||
289 | self.fn_is_prev = fn_is_prev(syntax_element.clone()); | ||
290 | self.incomplete_let = | 330 | self.incomplete_let = |
291 | syntax_element.ancestors().take(6).find_map(ast::LetStmt::cast).map_or(false, |it| { | 331 | syntax_element.ancestors().take(6).find_map(ast::LetStmt::cast).map_or(false, |it| { |
292 | it.syntax().text_range().end() == syntax_element.text_range().end() | 332 | it.syntax().text_range().end() == syntax_element.text_range().end() |
293 | }); | 333 | }); |
334 | |||
335 | let inside_impl_trait_block = inside_impl_trait_block(syntax_element.clone()); | ||
336 | let fn_is_prev = self.previous_token_is(T![fn]); | ||
337 | let for_is_prev2 = for_is_prev2(syntax_element.clone()); | ||
338 | self.no_completion_required = (fn_is_prev && !inside_impl_trait_block) || for_is_prev2; | ||
294 | } | 339 | } |
295 | 340 | ||
296 | fn fill_impl_def(&mut self) { | 341 | fn fill_impl_def(&mut self) { |
@@ -412,67 +457,21 @@ impl<'a> CompletionContext<'a> { | |||
412 | self.expected_type = expected_type; | 457 | self.expected_type = expected_type; |
413 | self.expected_name = expected_name; | 458 | self.expected_name = expected_name; |
414 | self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); | 459 | self.attribute_under_caret = find_node_at_offset(&file_with_fake_ident, offset); |
415 | 460 | let name_like = match find_node_at_offset(&&file_with_fake_ident, offset) { | |
416 | if let Some(lifetime) = find_node_at_offset::<ast::Lifetime>(&file_with_fake_ident, offset) | 461 | Some(it) => it, |
417 | { | 462 | None => return, |
418 | self.classify_lifetime(original_file, lifetime, offset); | 463 | }; |
419 | } | 464 | self.completion_location = determine_location(&name_like); |
420 | 465 | self.prev_sibling = determine_prev_sibling(&name_like); | |
421 | // First, let's try to complete a reference to some declaration. | 466 | match name_like { |
422 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) { | 467 | ast::NameLike::Lifetime(lifetime) => { |
423 | // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. | 468 | self.classify_lifetime(original_file, lifetime, offset); |
424 | // See RFC#1685. | ||
425 | if is_node::<ast::Param>(name_ref.syntax()) { | ||
426 | self.is_param = true; | ||
427 | return; | ||
428 | } | ||
429 | // FIXME: remove this (V) duplication and make the check more precise | ||
430 | if name_ref.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { | ||
431 | self.record_pat_syntax = | ||
432 | self.sema.find_node_at_offset_with_macros(&original_file, offset); | ||
433 | } | ||
434 | self.classify_name_ref(original_file, name_ref, offset); | ||
435 | } | ||
436 | |||
437 | // Otherwise, see if this is a declaration. We can use heuristics to | ||
438 | // suggest declaration names, see `CompletionKind::Magic`. | ||
439 | if let Some(name) = find_node_at_offset::<ast::Name>(&file_with_fake_ident, offset) { | ||
440 | if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) { | ||
441 | self.is_pat_binding_or_const = true; | ||
442 | if bind_pat.at_token().is_some() | ||
443 | || bind_pat.ref_token().is_some() | ||
444 | || bind_pat.mut_token().is_some() | ||
445 | { | ||
446 | self.is_pat_binding_or_const = false; | ||
447 | } | ||
448 | if bind_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast).is_some() { | ||
449 | self.is_pat_binding_or_const = false; | ||
450 | } | ||
451 | if let Some(Some(pat)) = bind_pat.syntax().ancestors().find_map(|node| { | ||
452 | match_ast! { | ||
453 | match node { | ||
454 | ast::LetStmt(it) => Some(it.pat()), | ||
455 | ast::Param(it) => Some(it.pat()), | ||
456 | _ => None, | ||
457 | } | ||
458 | } | ||
459 | }) { | ||
460 | if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) { | ||
461 | self.is_pat_binding_or_const = false; | ||
462 | self.is_irrefutable_pat_binding = true; | ||
463 | } | ||
464 | } | ||
465 | |||
466 | self.fill_impl_def(); | ||
467 | } | 469 | } |
468 | if is_node::<ast::Param>(name.syntax()) { | 470 | ast::NameLike::NameRef(name_ref) => { |
469 | self.is_param = true; | 471 | self.classify_name_ref(original_file, name_ref, offset); |
470 | return; | ||
471 | } | 472 | } |
472 | // FIXME: remove this (^) duplication and make the check more precise | 473 | ast::NameLike::Name(name) => { |
473 | if name.syntax().ancestors().find_map(ast::RecordPatFieldList::cast).is_some() { | 474 | self.classify_name(original_file, name, offset); |
474 | self.record_pat_syntax = | ||
475 | self.sema.find_node_at_offset_with_macros(&original_file, offset); | ||
476 | } | 475 | } |
477 | } | 476 | } |
478 | } | 477 | } |
@@ -506,22 +505,71 @@ impl<'a> CompletionContext<'a> { | |||
506 | } | 505 | } |
507 | } | 506 | } |
508 | 507 | ||
508 | fn classify_name(&mut self, original_file: &SyntaxNode, name: ast::Name, offset: TextSize) { | ||
509 | if let Some(bind_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) { | ||
510 | self.is_pat_or_const = Some(PatternRefutability::Refutable); | ||
511 | // if any of these is here our bind pat can't be a const pat anymore | ||
512 | let complex_ident_pat = bind_pat.at_token().is_some() | ||
513 | || bind_pat.ref_token().is_some() | ||
514 | || bind_pat.mut_token().is_some(); | ||
515 | if complex_ident_pat { | ||
516 | self.is_pat_or_const = None; | ||
517 | } else { | ||
518 | let irrefutable_pat = bind_pat.syntax().ancestors().find_map(|node| { | ||
519 | match_ast! { | ||
520 | match node { | ||
521 | ast::LetStmt(it) => Some(it.pat()), | ||
522 | ast::Param(it) => Some(it.pat()), | ||
523 | _ => None, | ||
524 | } | ||
525 | } | ||
526 | }); | ||
527 | if let Some(Some(pat)) = irrefutable_pat { | ||
528 | // This check is here since we could be inside a pattern in the initializer expression of the let statement. | ||
529 | if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) { | ||
530 | self.is_pat_or_const = Some(PatternRefutability::Irrefutable); | ||
531 | } | ||
532 | } | ||
533 | |||
534 | let is_name_in_field_pat = bind_pat | ||
535 | .syntax() | ||
536 | .parent() | ||
537 | .and_then(ast::RecordPatField::cast) | ||
538 | .map_or(false, |pat_field| pat_field.name_ref().is_none()); | ||
539 | if is_name_in_field_pat { | ||
540 | self.is_pat_or_const = None; | ||
541 | } | ||
542 | } | ||
543 | |||
544 | self.fill_impl_def(); | ||
545 | } | ||
546 | self.is_param |= is_node::<ast::Param>(name.syntax()); | ||
547 | if ast::RecordPatField::for_field_name(&name).is_some() { | ||
548 | self.record_pat_syntax = | ||
549 | self.sema.find_node_at_offset_with_macros(&original_file, offset); | ||
550 | } | ||
551 | } | ||
552 | |||
509 | fn classify_name_ref( | 553 | fn classify_name_ref( |
510 | &mut self, | 554 | &mut self, |
511 | original_file: &SyntaxNode, | 555 | original_file: &SyntaxNode, |
512 | name_ref: ast::NameRef, | 556 | name_ref: ast::NameRef, |
513 | offset: TextSize, | 557 | offset: TextSize, |
514 | ) { | 558 | ) { |
515 | self.name_ref_syntax = | 559 | self.fill_impl_def(); |
516 | find_node_at_offset(original_file, name_ref.syntax().text_range().start()); | ||
517 | let name_range = name_ref.syntax().text_range(); | ||
518 | if ast::RecordExprField::for_field_name(&name_ref).is_some() { | 560 | if ast::RecordExprField::for_field_name(&name_ref).is_some() { |
519 | self.record_lit_syntax = | 561 | self.record_lit_syntax = |
520 | self.sema.find_node_at_offset_with_macros(original_file, offset); | 562 | self.sema.find_node_at_offset_with_macros(original_file, offset); |
521 | } | 563 | } |
564 | if ast::RecordPatField::for_field_name_ref(&name_ref).is_some() { | ||
565 | self.record_pat_syntax = | ||
566 | self.sema.find_node_at_offset_with_macros(&original_file, offset); | ||
567 | } | ||
522 | 568 | ||
523 | self.fill_impl_def(); | 569 | self.name_ref_syntax = |
570 | find_node_at_offset(original_file, name_ref.syntax().text_range().start()); | ||
524 | 571 | ||
572 | let name_range = name_ref.syntax().text_range(); | ||
525 | let top_node = name_ref | 573 | let top_node = name_ref |
526 | .syntax() | 574 | .syntax() |
527 | .ancestors() | 575 | .ancestors() |
@@ -529,31 +577,20 @@ impl<'a> CompletionContext<'a> { | |||
529 | .last() | 577 | .last() |
530 | .unwrap(); | 578 | .unwrap(); |
531 | 579 | ||
532 | match top_node.parent().map(|it| it.kind()) { | 580 | if matches!(top_node.parent().map(|it| it.kind()), Some(SOURCE_FILE) | Some(ITEM_LIST)) { |
533 | Some(SOURCE_FILE) | Some(ITEM_LIST) => { | 581 | self.is_new_item = true; |
534 | self.is_new_item = true; | 582 | return; |
535 | return; | ||
536 | } | ||
537 | _ => (), | ||
538 | } | 583 | } |
539 | 584 | ||
540 | self.use_item_syntax = | 585 | self.use_item_syntax = |
541 | self.sema.token_ancestors_with_macros(self.token.clone()).find_map(ast::Use::cast); | 586 | self.sema.token_ancestors_with_macros(self.token.clone()).find_map(ast::Use::cast); |
542 | 587 | ||
543 | self.function_syntax = self | 588 | self.function_def = self |
544 | .sema | 589 | .sema |
545 | .token_ancestors_with_macros(self.token.clone()) | 590 | .token_ancestors_with_macros(self.token.clone()) |
546 | .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) | 591 | .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) |
547 | .find_map(ast::Fn::cast); | 592 | .find_map(ast::Fn::cast); |
548 | 593 | ||
549 | self.record_field_syntax = self | ||
550 | .sema | ||
551 | .token_ancestors_with_macros(self.token.clone()) | ||
552 | .take_while(|it| { | ||
553 | it.kind() != SOURCE_FILE && it.kind() != MODULE && it.kind() != CALL_EXPR | ||
554 | }) | ||
555 | .find_map(ast::RecordExprField::cast); | ||
556 | |||
557 | let parent = match name_ref.syntax().parent() { | 594 | let parent = match name_ref.syntax().parent() { |
558 | Some(it) => it, | 595 | Some(it) => it, |
559 | None => return, | 596 | None => return, |
@@ -614,18 +651,8 @@ impl<'a> CompletionContext<'a> { | |||
614 | }) | 651 | }) |
615 | .unwrap_or(false); | 652 | .unwrap_or(false); |
616 | self.is_expr = path.syntax().parent().and_then(ast::PathExpr::cast).is_some(); | 653 | self.is_expr = path.syntax().parent().and_then(ast::PathExpr::cast).is_some(); |
617 | |||
618 | if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { | ||
619 | if let Some(if_expr) = | ||
620 | self.sema.find_node_at_offset_with_macros::<ast::IfExpr>(original_file, off) | ||
621 | { | ||
622 | if if_expr.syntax().text_range().end() < name_ref.syntax().text_range().start() | ||
623 | { | ||
624 | self.after_if = true; | ||
625 | } | ||
626 | } | ||
627 | } | ||
628 | } | 654 | } |
655 | |||
629 | if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { | 656 | if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { |
630 | // The receiver comes before the point of insertion of the fake | 657 | // The receiver comes before the point of insertion of the fake |
631 | // ident, so it should have the same range in the non-modified file | 658 | // ident, so it should have the same range in the non-modified file |
@@ -643,6 +670,7 @@ impl<'a> CompletionContext<'a> { | |||
643 | false | 670 | false |
644 | }; | 671 | }; |
645 | } | 672 | } |
673 | |||
646 | if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { | 674 | if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) { |
647 | // As above | 675 | // As above |
648 | self.dot_receiver = method_call_expr | 676 | self.dot_receiver = method_call_expr |
diff --git a/crates/ide_completion/src/lib.rs b/crates/ide_completion/src/lib.rs index 645349215..1152a9850 100644 --- a/crates/ide_completion/src/lib.rs +++ b/crates/ide_completion/src/lib.rs | |||
@@ -107,7 +107,7 @@ pub use crate::{ | |||
107 | /// identifier prefix/fuzzy match should be done higher in the stack, together | 107 | /// identifier prefix/fuzzy match should be done higher in the stack, together |
108 | /// with ordering of completions (currently this is done by the client). | 108 | /// with ordering of completions (currently this is done by the client). |
109 | /// | 109 | /// |
110 | /// # Hypothetical Completion Problem | 110 | /// # Speculative Completion Problem |
111 | /// | 111 | /// |
112 | /// There's a curious unsolved problem in the current implementation. Often, you | 112 | /// There's a curious unsolved problem in the current implementation. Often, you |
113 | /// want to compute completions on a *slightly different* text document. | 113 | /// want to compute completions on a *slightly different* text document. |
@@ -121,7 +121,7 @@ pub use crate::{ | |||
121 | /// doesn't allow such "phantom" inputs. | 121 | /// doesn't allow such "phantom" inputs. |
122 | /// | 122 | /// |
123 | /// Another case where this would be instrumental is macro expansion. We want to | 123 | /// Another case where this would be instrumental is macro expansion. We want to |
124 | /// insert a fake ident and re-expand code. There's `expand_hypothetical` as a | 124 | /// insert a fake ident and re-expand code. There's `expand_speculative` as a |
125 | /// work-around for this. | 125 | /// work-around for this. |
126 | /// | 126 | /// |
127 | /// A different use-case is completion of injection (examples and links in doc | 127 | /// A different use-case is completion of injection (examples and links in doc |
diff --git a/crates/ide_completion/src/patterns.rs b/crates/ide_completion/src/patterns.rs index d82564381..caf0ef39f 100644 --- a/crates/ide_completion/src/patterns.rs +++ b/crates/ide_completion/src/patterns.rs | |||
@@ -10,29 +10,143 @@ use syntax::{ | |||
10 | 10 | ||
11 | #[cfg(test)] | 11 | #[cfg(test)] |
12 | use crate::test_utils::{check_pattern_is_applicable, check_pattern_is_not_applicable}; | 12 | use crate::test_utils::{check_pattern_is_applicable, check_pattern_is_not_applicable}; |
13 | /// Direct parent container of the cursor position | ||
14 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | ||
15 | pub(crate) enum ImmediatePrevSibling { | ||
16 | IfExpr, | ||
17 | TraitDefName, | ||
18 | ImplDefType, | ||
19 | } | ||
13 | 20 | ||
14 | pub(crate) fn has_trait_parent(element: SyntaxElement) -> bool { | 21 | /// Direct parent container of the cursor position |
15 | not_same_range_ancestor(element) | 22 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] |
16 | .filter(|it| it.kind() == ASSOC_ITEM_LIST) | 23 | pub(crate) enum ImmediateLocation { |
17 | .and_then(|it| it.parent()) | 24 | Use, |
18 | .filter(|it| it.kind() == TRAIT) | 25 | Impl, |
19 | .is_some() | 26 | Trait, |
27 | RecordField, | ||
28 | RefExpr, | ||
29 | IdentPat, | ||
30 | BlockExpr, | ||
31 | ItemList, | ||
20 | } | 32 | } |
21 | #[test] | 33 | |
22 | fn test_has_trait_parent() { | 34 | pub(crate) fn determine_prev_sibling(name_like: &ast::NameLike) -> Option<ImmediatePrevSibling> { |
23 | check_pattern_is_applicable(r"trait A { f$0 }", has_trait_parent); | 35 | let node = maximize_name_ref(name_like)?; |
36 | let node = match node.parent().and_then(ast::MacroCall::cast) { | ||
37 | // When a path is being typed after the name of a trait/type of an impl it is being | ||
38 | // parsed as a macro, so when the trait/impl has a block following it an we are between the | ||
39 | // name and block the macro will attach the block to itself so maximizing fails to take | ||
40 | // that into account | ||
41 | // FIXME path expr and statement have a similar problem with attrs | ||
42 | Some(call) | ||
43 | if call.excl_token().is_none() | ||
44 | && call.token_tree().map_or(false, |t| t.l_curly_token().is_some()) | ||
45 | && call.semicolon_token().is_none() => | ||
46 | { | ||
47 | call.syntax().clone() | ||
48 | } | ||
49 | _ => node, | ||
50 | }; | ||
51 | let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?; | ||
52 | let res = match_ast! { | ||
53 | match prev_sibling { | ||
54 | ast::ExprStmt(it) => { | ||
55 | let node = it.expr().filter(|_| it.semicolon_token().is_none())?.syntax().clone(); | ||
56 | match_ast! { | ||
57 | match node { | ||
58 | ast::IfExpr(_it) => ImmediatePrevSibling::IfExpr, | ||
59 | _ => return None, | ||
60 | } | ||
61 | } | ||
62 | }, | ||
63 | ast::Trait(it) => if it.assoc_item_list().is_none() { | ||
64 | ImmediatePrevSibling::TraitDefName | ||
65 | } else { | ||
66 | return None | ||
67 | }, | ||
68 | ast::Impl(it) => if it.assoc_item_list().is_none() | ||
69 | && (it.for_token().is_none() || it.self_ty().is_some()) { | ||
70 | ImmediatePrevSibling::ImplDefType | ||
71 | } else { | ||
72 | return None | ||
73 | }, | ||
74 | _ => return None, | ||
75 | } | ||
76 | }; | ||
77 | Some(res) | ||
24 | } | 78 | } |
25 | 79 | ||
26 | pub(crate) fn has_impl_parent(element: SyntaxElement) -> bool { | 80 | pub(crate) fn determine_location(name_like: &ast::NameLike) -> Option<ImmediateLocation> { |
27 | not_same_range_ancestor(element) | 81 | let node = maximize_name_ref(name_like)?; |
28 | .filter(|it| it.kind() == ASSOC_ITEM_LIST) | 82 | let parent = match node.parent() { |
29 | .and_then(|it| it.parent()) | 83 | Some(parent) => match ast::MacroCall::cast(parent.clone()) { |
30 | .filter(|it| it.kind() == IMPL) | 84 | // When a path is being typed in an (Assoc)ItemList the parser will always emit a macro_call. |
31 | .is_some() | 85 | // This is usually fine as the node expansion code above already accounts for that with |
86 | // the ancestors call, but there is one exception to this which is that when an attribute | ||
87 | // precedes it the code above will not walk the Path to the parent MacroCall as their ranges differ. | ||
88 | // FIXME path expr and statement have a similar problem | ||
89 | Some(call) | ||
90 | if call.excl_token().is_none() | ||
91 | && call.token_tree().is_none() | ||
92 | && call.semicolon_token().is_none() => | ||
93 | { | ||
94 | call.syntax().parent()? | ||
95 | } | ||
96 | _ => parent, | ||
97 | }, | ||
98 | // SourceFile | ||
99 | None => { | ||
100 | return match node.kind() { | ||
101 | MACRO_ITEMS | SOURCE_FILE => Some(ImmediateLocation::ItemList), | ||
102 | _ => None, | ||
103 | } | ||
104 | } | ||
105 | }; | ||
106 | let res = match_ast! { | ||
107 | match parent { | ||
108 | ast::IdentPat(_it) => ImmediateLocation::IdentPat, | ||
109 | ast::Use(_it) => ImmediateLocation::Use, | ||
110 | ast::BlockExpr(_it) => ImmediateLocation::BlockExpr, | ||
111 | ast::SourceFile(_it) => ImmediateLocation::ItemList, | ||
112 | ast::ItemList(_it) => ImmediateLocation::ItemList, | ||
113 | ast::RefExpr(_it) => ImmediateLocation::RefExpr, | ||
114 | ast::RecordField(_it) => ImmediateLocation::RecordField, | ||
115 | ast::AssocItemList(it) => match it.syntax().parent().map(|it| it.kind()) { | ||
116 | Some(IMPL) => ImmediateLocation::Impl, | ||
117 | Some(TRAIT) => ImmediateLocation::Trait, | ||
118 | _ => return None, | ||
119 | }, | ||
120 | _ => return None, | ||
121 | } | ||
122 | }; | ||
123 | Some(res) | ||
32 | } | 124 | } |
33 | #[test] | 125 | |
34 | fn test_has_impl_parent() { | 126 | fn maximize_name_ref(name_like: &ast::NameLike) -> Option<SyntaxNode> { |
35 | check_pattern_is_applicable(r"impl A { f$0 }", has_impl_parent); | 127 | // First walk the element we are completing up to its highest node that has the same text range |
128 | // as the element so that we can check in what context it immediately lies. We only do this for | ||
129 | // NameRef -> Path as that's the only thing that makes sense to being "expanded" semantically. | ||
130 | // We only wanna do this if the NameRef is the last segment of the path. | ||
131 | let node = match name_like { | ||
132 | ast::NameLike::NameRef(name_ref) => { | ||
133 | if let Some(segment) = name_ref.syntax().parent().and_then(ast::PathSegment::cast) { | ||
134 | let p = segment.parent_path(); | ||
135 | if p.parent_path().is_none() { | ||
136 | p.syntax() | ||
137 | .ancestors() | ||
138 | .take_while(|it| it.text_range() == p.syntax().text_range()) | ||
139 | .last()? | ||
140 | } else { | ||
141 | return None; | ||
142 | } | ||
143 | } else { | ||
144 | return None; | ||
145 | } | ||
146 | } | ||
147 | it @ ast::NameLike::Name(_) | it @ ast::NameLike::Lifetime(_) => it.syntax().clone(), | ||
148 | }; | ||
149 | Some(node) | ||
36 | } | 150 | } |
37 | 151 | ||
38 | pub(crate) fn inside_impl_trait_block(element: SyntaxElement) -> bool { | 152 | pub(crate) fn inside_impl_trait_block(element: SyntaxElement) -> bool { |
@@ -53,56 +167,6 @@ fn test_inside_impl_trait_block() { | |||
53 | check_pattern_is_not_applicable(r"impl A { fn f$0 }", inside_impl_trait_block); | 167 | check_pattern_is_not_applicable(r"impl A { fn f$0 }", inside_impl_trait_block); |
54 | } | 168 | } |
55 | 169 | ||
56 | pub(crate) fn has_field_list_parent(element: SyntaxElement) -> bool { | ||
57 | not_same_range_ancestor(element).filter(|it| it.kind() == RECORD_FIELD_LIST).is_some() | ||
58 | } | ||
59 | #[test] | ||
60 | fn test_has_field_list_parent() { | ||
61 | check_pattern_is_applicable(r"struct Foo { f$0 }", has_field_list_parent); | ||
62 | check_pattern_is_applicable(r"struct Foo { f$0 pub f: i32}", has_field_list_parent); | ||
63 | } | ||
64 | |||
65 | pub(crate) fn has_block_expr_parent(element: SyntaxElement) -> bool { | ||
66 | not_same_range_ancestor(element).filter(|it| it.kind() == BLOCK_EXPR).is_some() | ||
67 | } | ||
68 | #[test] | ||
69 | fn test_has_block_expr_parent() { | ||
70 | check_pattern_is_applicable(r"fn my_fn() { let a = 2; f$0 }", has_block_expr_parent); | ||
71 | } | ||
72 | |||
73 | pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool { | ||
74 | element.ancestors().any(|it| it.kind() == IDENT_PAT) | ||
75 | } | ||
76 | #[test] | ||
77 | fn test_has_bind_pat_parent() { | ||
78 | check_pattern_is_applicable(r"fn my_fn(m$0) {}", has_bind_pat_parent); | ||
79 | check_pattern_is_applicable(r"fn my_fn() { let m$0 }", has_bind_pat_parent); | ||
80 | } | ||
81 | |||
82 | pub(crate) fn has_ref_parent(element: SyntaxElement) -> bool { | ||
83 | not_same_range_ancestor(element) | ||
84 | .filter(|it| it.kind() == REF_PAT || it.kind() == REF_EXPR) | ||
85 | .is_some() | ||
86 | } | ||
87 | #[test] | ||
88 | fn test_has_ref_parent() { | ||
89 | check_pattern_is_applicable(r"fn my_fn(&m$0) {}", has_ref_parent); | ||
90 | check_pattern_is_applicable(r"fn my() { let &m$0 }", has_ref_parent); | ||
91 | } | ||
92 | |||
93 | pub(crate) fn has_item_list_or_source_file_parent(element: SyntaxElement) -> bool { | ||
94 | let ancestor = not_same_range_ancestor(element); | ||
95 | if !ancestor.is_some() { | ||
96 | return true; | ||
97 | } | ||
98 | ancestor.filter(|it| it.kind() == SOURCE_FILE || it.kind() == ITEM_LIST).is_some() | ||
99 | } | ||
100 | #[test] | ||
101 | fn test_has_item_list_or_source_file_parent() { | ||
102 | check_pattern_is_applicable(r"i$0", has_item_list_or_source_file_parent); | ||
103 | check_pattern_is_applicable(r"mod foo { f$0 }", has_item_list_or_source_file_parent); | ||
104 | } | ||
105 | |||
106 | pub(crate) fn is_match_arm(element: SyntaxElement) -> bool { | 170 | pub(crate) fn is_match_arm(element: SyntaxElement) -> bool { |
107 | not_same_range_ancestor(element.clone()).filter(|it| it.kind() == MATCH_ARM).is_some() | 171 | not_same_range_ancestor(element.clone()).filter(|it| it.kind() == MATCH_ARM).is_some() |
108 | && previous_sibling_or_ancestor_sibling(element) | 172 | && previous_sibling_or_ancestor_sibling(element) |
@@ -115,36 +179,8 @@ fn test_is_match_arm() { | |||
115 | check_pattern_is_applicable(r"fn my_fn() { match () { () => m$0 } }", is_match_arm); | 179 | check_pattern_is_applicable(r"fn my_fn() { match () { () => m$0 } }", is_match_arm); |
116 | } | 180 | } |
117 | 181 | ||
118 | pub(crate) fn unsafe_is_prev(element: SyntaxElement) -> bool { | 182 | pub(crate) fn previous_token(element: SyntaxElement) -> Option<SyntaxToken> { |
119 | element | 183 | element.into_token().and_then(|it| previous_non_trivia_token(it)) |
120 | .into_token() | ||
121 | .and_then(|it| previous_non_trivia_token(it)) | ||
122 | .filter(|it| it.kind() == T![unsafe]) | ||
123 | .is_some() | ||
124 | } | ||
125 | #[test] | ||
126 | fn test_unsafe_is_prev() { | ||
127 | check_pattern_is_applicable(r"unsafe i$0", unsafe_is_prev); | ||
128 | } | ||
129 | |||
130 | pub(crate) fn if_is_prev(element: SyntaxElement) -> bool { | ||
131 | element | ||
132 | .into_token() | ||
133 | .and_then(|it| previous_non_trivia_token(it)) | ||
134 | .filter(|it| it.kind() == T![if]) | ||
135 | .is_some() | ||
136 | } | ||
137 | |||
138 | pub(crate) fn fn_is_prev(element: SyntaxElement) -> bool { | ||
139 | element | ||
140 | .into_token() | ||
141 | .and_then(|it| previous_non_trivia_token(it)) | ||
142 | .filter(|it| it.kind() == T![fn]) | ||
143 | .is_some() | ||
144 | } | ||
145 | #[test] | ||
146 | fn test_fn_is_prev() { | ||
147 | check_pattern_is_applicable(r"fn l$0", fn_is_prev); | ||
148 | } | 184 | } |
149 | 185 | ||
150 | /// Check if the token previous to the previous one is `for`. | 186 | /// Check if the token previous to the previous one is `for`. |
@@ -162,55 +198,26 @@ fn test_for_is_prev2() { | |||
162 | check_pattern_is_applicable(r"for i i$0", for_is_prev2); | 198 | check_pattern_is_applicable(r"for i i$0", for_is_prev2); |
163 | } | 199 | } |
164 | 200 | ||
165 | #[test] | ||
166 | fn test_if_is_prev() { | ||
167 | check_pattern_is_applicable(r"if l$0", if_is_prev); | ||
168 | } | ||
169 | |||
170 | pub(crate) fn has_trait_as_prev_sibling(element: SyntaxElement) -> bool { | ||
171 | previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == TRAIT).is_some() | ||
172 | } | ||
173 | #[test] | ||
174 | fn test_has_trait_as_prev_sibling() { | ||
175 | check_pattern_is_applicable(r"trait A w$0 {}", has_trait_as_prev_sibling); | ||
176 | } | ||
177 | |||
178 | pub(crate) fn has_impl_as_prev_sibling(element: SyntaxElement) -> bool { | ||
179 | previous_sibling_or_ancestor_sibling(element).filter(|it| it.kind() == IMPL).is_some() | ||
180 | } | ||
181 | #[test] | ||
182 | fn test_has_impl_as_prev_sibling() { | ||
183 | check_pattern_is_applicable(r"impl A w$0 {}", has_impl_as_prev_sibling); | ||
184 | } | ||
185 | |||
186 | pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool { | 201 | pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool { |
187 | for node in element.ancestors() { | ||
188 | if node.kind() == FN || node.kind() == CLOSURE_EXPR { | ||
189 | break; | ||
190 | } | ||
191 | let loop_body = match_ast! { | ||
192 | match node { | ||
193 | ast::ForExpr(it) => it.loop_body(), | ||
194 | ast::WhileExpr(it) => it.loop_body(), | ||
195 | ast::LoopExpr(it) => it.loop_body(), | ||
196 | _ => None, | ||
197 | } | ||
198 | }; | ||
199 | if let Some(body) = loop_body { | ||
200 | if body.syntax().text_range().contains_range(element.text_range()) { | ||
201 | return true; | ||
202 | } | ||
203 | } | ||
204 | } | ||
205 | false | ||
206 | } | ||
207 | |||
208 | fn not_same_range_ancestor(element: SyntaxElement) -> Option<SyntaxNode> { | ||
209 | element | 202 | element |
210 | .ancestors() | 203 | .ancestors() |
211 | .take_while(|it| it.text_range() == element.text_range()) | 204 | .take_while(|it| it.kind() != FN && it.kind() != CLOSURE_EXPR) |
212 | .last() | 205 | .find_map(|it| { |
213 | .and_then(|it| it.parent()) | 206 | let loop_body = match_ast! { |
207 | match it { | ||
208 | ast::ForExpr(it) => it.loop_body(), | ||
209 | ast::WhileExpr(it) => it.loop_body(), | ||
210 | ast::LoopExpr(it) => it.loop_body(), | ||
211 | _ => None, | ||
212 | } | ||
213 | }; | ||
214 | loop_body.filter(|it| it.syntax().text_range().contains_range(element.text_range())) | ||
215 | }) | ||
216 | .is_some() | ||
217 | } | ||
218 | |||
219 | pub(crate) fn not_same_range_ancestor(element: SyntaxElement) -> Option<SyntaxNode> { | ||
220 | element.ancestors().skip_while(|it| it.text_range() == element.text_range()).next() | ||
214 | } | 221 | } |
215 | 222 | ||
216 | fn previous_non_trivia_token(token: SyntaxToken) -> Option<SyntaxToken> { | 223 | fn previous_non_trivia_token(token: SyntaxToken) -> Option<SyntaxToken> { |
@@ -239,3 +246,111 @@ fn previous_sibling_or_ancestor_sibling(element: SyntaxElement) -> Option<Syntax | |||
239 | non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev) | 246 | non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev) |
240 | } | 247 | } |
241 | } | 248 | } |
249 | |||
250 | #[cfg(test)] | ||
251 | mod tests { | ||
252 | use super::*; | ||
253 | |||
254 | fn check_location(code: &str, loc: impl Into<Option<ImmediateLocation>>) { | ||
255 | check_pattern_is_applicable(code, |e| { | ||
256 | let name = &e.parent().and_then(ast::NameLike::cast).expect("Expected a namelike"); | ||
257 | assert_eq!(determine_location(name), loc.into()); | ||
258 | true | ||
259 | }); | ||
260 | } | ||
261 | |||
262 | fn check_prev_sibling(code: &str, sibling: impl Into<Option<ImmediatePrevSibling>>) { | ||
263 | check_pattern_is_applicable(code, |e| { | ||
264 | let name = &e.parent().and_then(ast::NameLike::cast).expect("Expected a namelike"); | ||
265 | assert_eq!(determine_prev_sibling(name), sibling.into()); | ||
266 | true | ||
267 | }); | ||
268 | } | ||
269 | |||
270 | #[test] | ||
271 | fn test_trait_loc() { | ||
272 | check_location(r"trait A { f$0 }", ImmediateLocation::Trait); | ||
273 | check_location(r"trait A { #[attr] f$0 }", ImmediateLocation::Trait); | ||
274 | check_location(r"trait A { f$0 fn f() {} }", ImmediateLocation::Trait); | ||
275 | check_location(r"trait A { fn f() {} f$0 }", ImmediateLocation::Trait); | ||
276 | check_location(r"trait A$0 {}", None); | ||
277 | check_location(r"trait A { fn f$0 }", None); | ||
278 | } | ||
279 | |||
280 | #[test] | ||
281 | fn test_impl_loc() { | ||
282 | check_location(r"impl A { f$0 }", ImmediateLocation::Impl); | ||
283 | check_location(r"impl A { #[attr] f$0 }", ImmediateLocation::Impl); | ||
284 | check_location(r"impl A { f$0 fn f() {} }", ImmediateLocation::Impl); | ||
285 | check_location(r"impl A { fn f() {} f$0 }", ImmediateLocation::Impl); | ||
286 | check_location(r"impl A$0 {}", None); | ||
287 | check_location(r"impl A { fn f$0 }", None); | ||
288 | } | ||
289 | |||
290 | #[test] | ||
291 | fn test_use_loc() { | ||
292 | check_location(r"use f$0", ImmediateLocation::Use); | ||
293 | check_location(r"use f$0;", ImmediateLocation::Use); | ||
294 | check_location(r"use f::{f$0}", None); | ||
295 | check_location(r"use {f$0}", None); | ||
296 | } | ||
297 | |||
298 | #[test] | ||
299 | fn test_record_field_loc() { | ||
300 | check_location(r"struct Foo { f$0 }", ImmediateLocation::RecordField); | ||
301 | check_location(r"struct Foo { f$0 pub f: i32}", ImmediateLocation::RecordField); | ||
302 | check_location(r"struct Foo { pub f: i32, f$0 }", ImmediateLocation::RecordField); | ||
303 | } | ||
304 | |||
305 | #[test] | ||
306 | fn test_block_expr_loc() { | ||
307 | check_location(r"fn my_fn() { let a = 2; f$0 }", ImmediateLocation::BlockExpr); | ||
308 | check_location(r"fn my_fn() { f$0 f }", ImmediateLocation::BlockExpr); | ||
309 | } | ||
310 | |||
311 | #[test] | ||
312 | fn test_ident_pat_loc() { | ||
313 | check_location(r"fn my_fn(m$0) {}", ImmediateLocation::IdentPat); | ||
314 | check_location(r"fn my_fn() { let m$0 }", ImmediateLocation::IdentPat); | ||
315 | check_location(r"fn my_fn(&m$0) {}", ImmediateLocation::IdentPat); | ||
316 | check_location(r"fn my_fn() { let &m$0 }", ImmediateLocation::IdentPat); | ||
317 | } | ||
318 | |||
319 | #[test] | ||
320 | fn test_ref_expr_loc() { | ||
321 | check_location(r"fn my_fn() { let x = &m$0 foo; }", ImmediateLocation::RefExpr); | ||
322 | } | ||
323 | |||
324 | #[test] | ||
325 | fn test_item_list_loc() { | ||
326 | check_location(r"i$0", ImmediateLocation::ItemList); | ||
327 | check_location(r"#[attr] i$0", ImmediateLocation::ItemList); | ||
328 | check_location(r"fn f() {} i$0", ImmediateLocation::ItemList); | ||
329 | check_location(r"mod foo { f$0 }", ImmediateLocation::ItemList); | ||
330 | check_location(r"mod foo { #[attr] f$0 }", ImmediateLocation::ItemList); | ||
331 | check_location(r"mod foo { fn f() {} f$0 }", ImmediateLocation::ItemList); | ||
332 | check_location(r"mod foo$0 {}", None); | ||
333 | } | ||
334 | |||
335 | #[test] | ||
336 | fn test_impl_prev_sibling() { | ||
337 | check_prev_sibling(r"impl A w$0 ", ImmediatePrevSibling::ImplDefType); | ||
338 | check_prev_sibling(r"impl A w$0 {}", ImmediatePrevSibling::ImplDefType); | ||
339 | check_prev_sibling(r"impl A for A w$0 ", ImmediatePrevSibling::ImplDefType); | ||
340 | check_prev_sibling(r"impl A for A w$0 {}", ImmediatePrevSibling::ImplDefType); | ||
341 | check_prev_sibling(r"impl A for w$0 {}", None); | ||
342 | check_prev_sibling(r"impl A for w$0", None); | ||
343 | } | ||
344 | |||
345 | #[test] | ||
346 | fn test_trait_prev_sibling() { | ||
347 | check_prev_sibling(r"trait A w$0 ", ImmediatePrevSibling::TraitDefName); | ||
348 | check_prev_sibling(r"trait A w$0 {}", ImmediatePrevSibling::TraitDefName); | ||
349 | } | ||
350 | |||
351 | #[test] | ||
352 | fn test_if_expr_prev_sibling() { | ||
353 | check_prev_sibling(r"fn foo() { if true {} w$0", ImmediatePrevSibling::IfExpr); | ||
354 | check_prev_sibling(r"fn foo() { if true {}; w$0", None); | ||
355 | } | ||
356 | } | ||
diff --git a/crates/ide_completion/src/render.rs b/crates/ide_completion/src/render.rs index d7f96b864..91300c56e 100644 --- a/crates/ide_completion/src/render.rs +++ b/crates/ide_completion/src/render.rs | |||
@@ -187,10 +187,7 @@ impl<'a> Render<'a> { | |||
187 | ScopeDef::ModuleDef(Function(func)) => { | 187 | ScopeDef::ModuleDef(Function(func)) => { |
188 | return render_fn(self.ctx, import_to_add, Some(local_name), *func); | 188 | return render_fn(self.ctx, import_to_add, Some(local_name), *func); |
189 | } | 189 | } |
190 | ScopeDef::ModuleDef(Variant(_)) | 190 | ScopeDef::ModuleDef(Variant(_)) if self.ctx.completion.is_pat_or_const.is_some() => { |
191 | if self.ctx.completion.is_pat_binding_or_const | ||
192 | | self.ctx.completion.is_irrefutable_pat_binding => | ||
193 | { | ||
194 | CompletionItemKind::SymbolKind(SymbolKind::Variant) | 191 | CompletionItemKind::SymbolKind(SymbolKind::Variant) |
195 | } | 192 | } |
196 | ScopeDef::ModuleDef(Variant(var)) => { | 193 | ScopeDef::ModuleDef(Variant(var)) => { |
diff --git a/crates/ide_completion/src/render/macro_.rs b/crates/ide_completion/src/render/macro_.rs index 7578ad50b..b90fd3890 100644 --- a/crates/ide_completion/src/render/macro_.rs +++ b/crates/ide_completion/src/render/macro_.rs | |||
@@ -74,7 +74,11 @@ impl<'a> MacroRender<'a> { | |||
74 | if self.needs_bang() && self.ctx.snippet_cap().is_some() { | 74 | if self.needs_bang() && self.ctx.snippet_cap().is_some() { |
75 | format!("{}!{}…{}", self.name, self.bra, self.ket) | 75 | format!("{}!{}…{}", self.name, self.bra, self.ket) |
76 | } else { | 76 | } else { |
77 | self.banged_name() | 77 | if self.macro_.kind() == hir::MacroKind::Derive { |
78 | self.name.to_string() | ||
79 | } else { | ||
80 | self.banged_name() | ||
81 | } | ||
78 | } | 82 | } |
79 | } | 83 | } |
80 | 84 | ||
diff --git a/crates/ide_completion/src/test_utils.rs b/crates/ide_completion/src/test_utils.rs index 37be575e5..93c7c872c 100644 --- a/crates/ide_completion/src/test_utils.rs +++ b/crates/ide_completion/src/test_utils.rs | |||
@@ -12,7 +12,7 @@ use ide_db::{ | |||
12 | use itertools::Itertools; | 12 | use itertools::Itertools; |
13 | use stdx::{format_to, trim_indent}; | 13 | use stdx::{format_to, trim_indent}; |
14 | use syntax::{AstNode, NodeOrToken, SyntaxElement}; | 14 | use syntax::{AstNode, NodeOrToken, SyntaxElement}; |
15 | use test_utils::{assert_eq_text, RangeOrOffset}; | 15 | use test_utils::assert_eq_text; |
16 | 16 | ||
17 | use crate::{item::CompletionKind, CompletionConfig, CompletionItem}; | 17 | use crate::{item::CompletionKind, CompletionConfig, CompletionItem}; |
18 | 18 | ||
@@ -36,10 +36,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) { | |||
36 | let mut database = RootDatabase::default(); | 36 | let mut database = RootDatabase::default(); |
37 | database.apply_change(change_fixture.change); | 37 | database.apply_change(change_fixture.change); |
38 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); | 38 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); |
39 | let offset = match range_or_offset { | 39 | let offset = range_or_offset.expect_offset(); |
40 | RangeOrOffset::Range(_) => panic!(), | ||
41 | RangeOrOffset::Offset(it) => it, | ||
42 | }; | ||
43 | (database, FilePosition { file_id, offset }) | 40 | (database, FilePosition { file_id, offset }) |
44 | } | 41 | } |
45 | 42 | ||
@@ -52,10 +49,11 @@ pub(crate) fn do_completion_with_config( | |||
52 | code: &str, | 49 | code: &str, |
53 | kind: CompletionKind, | 50 | kind: CompletionKind, |
54 | ) -> Vec<CompletionItem> { | 51 | ) -> Vec<CompletionItem> { |
55 | let mut kind_completions: Vec<CompletionItem> = | 52 | get_all_items(config, code) |
56 | get_all_items(config, code).into_iter().filter(|c| c.completion_kind == kind).collect(); | 53 | .into_iter() |
57 | kind_completions.sort_by(|l, r| l.label().cmp(r.label())); | 54 | .filter(|c| c.completion_kind == kind) |
58 | kind_completions | 55 | .sorted_by(|l, r| l.label().cmp(r.label())) |
56 | .collect() | ||
59 | } | 57 | } |
60 | 58 | ||
61 | pub(crate) fn completion_list(code: &str, kind: CompletionKind) -> String { | 59 | pub(crate) fn completion_list(code: &str, kind: CompletionKind) -> String { |
@@ -132,7 +130,7 @@ pub(crate) fn check_edit_with_config( | |||
132 | assert_eq_text!(&ra_fixture_after, &actual) | 130 | assert_eq_text!(&ra_fixture_after, &actual) |
133 | } | 131 | } |
134 | 132 | ||
135 | pub(crate) fn check_pattern_is_applicable(code: &str, check: fn(SyntaxElement) -> bool) { | 133 | pub(crate) fn check_pattern_is_applicable(code: &str, check: impl FnOnce(SyntaxElement) -> bool) { |
136 | let (db, pos) = position(code); | 134 | let (db, pos) = position(code); |
137 | 135 | ||
138 | let sema = Semantics::new(&db); | 136 | let sema = Semantics::new(&db); |
diff --git a/crates/ide_db/src/call_info/tests.rs b/crates/ide_db/src/call_info/tests.rs index 1aeda08e5..b585085f3 100644 --- a/crates/ide_db/src/call_info/tests.rs +++ b/crates/ide_db/src/call_info/tests.rs | |||
@@ -1,6 +1,5 @@ | |||
1 | use base_db::{fixture::ChangeFixture, FilePosition}; | 1 | use base_db::{fixture::ChangeFixture, FilePosition}; |
2 | use expect_test::{expect, Expect}; | 2 | use expect_test::{expect, Expect}; |
3 | use test_utils::RangeOrOffset; | ||
4 | 3 | ||
5 | use crate::RootDatabase; | 4 | use crate::RootDatabase; |
6 | 5 | ||
@@ -10,10 +9,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) { | |||
10 | let mut database = RootDatabase::default(); | 9 | let mut database = RootDatabase::default(); |
11 | database.apply_change(change_fixture.change); | 10 | database.apply_change(change_fixture.change); |
12 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); | 11 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); |
13 | let offset = match range_or_offset { | 12 | let offset = range_or_offset.expect_offset(); |
14 | RangeOrOffset::Range(_) => panic!(), | ||
15 | RangeOrOffset::Offset(it) => it, | ||
16 | }; | ||
17 | (database, FilePosition { file_id, offset }) | 13 | (database, FilePosition { file_id, offset }) |
18 | } | 14 | } |
19 | 15 | ||
diff --git a/crates/ide_db/src/helpers/import_assets.rs b/crates/ide_db/src/helpers/import_assets.rs index 91d6a4665..ae52dd8bb 100644 --- a/crates/ide_db/src/helpers/import_assets.rs +++ b/crates/ide_db/src/helpers/import_assets.rs | |||
@@ -420,6 +420,8 @@ fn trait_applicable_items( | |||
420 | 420 | ||
421 | let db = sema.db; | 421 | let db = sema.db; |
422 | 422 | ||
423 | let related_dyn_traits = | ||
424 | trait_candidate.receiver_ty.applicable_inherent_traits(db).collect::<FxHashSet<_>>(); | ||
423 | let mut required_assoc_items = FxHashSet::default(); | 425 | let mut required_assoc_items = FxHashSet::default(); |
424 | let trait_candidates = items_locator::items_with_name( | 426 | let trait_candidates = items_locator::items_with_name( |
425 | sema, | 427 | sema, |
@@ -431,13 +433,15 @@ fn trait_applicable_items( | |||
431 | .filter_map(|input| item_as_assoc(db, input)) | 433 | .filter_map(|input| item_as_assoc(db, input)) |
432 | .filter_map(|assoc| { | 434 | .filter_map(|assoc| { |
433 | let assoc_item_trait = assoc.containing_trait(db)?; | 435 | let assoc_item_trait = assoc.containing_trait(db)?; |
434 | required_assoc_items.insert(assoc); | 436 | if related_dyn_traits.contains(&assoc_item_trait) { |
435 | Some(assoc_item_trait.into()) | 437 | None |
438 | } else { | ||
439 | required_assoc_items.insert(assoc); | ||
440 | Some(assoc_item_trait.into()) | ||
441 | } | ||
436 | }) | 442 | }) |
437 | .collect(); | 443 | .collect(); |
438 | 444 | ||
439 | let related_dyn_traits = | ||
440 | trait_candidate.receiver_ty.applicable_inherent_traits(db).collect::<FxHashSet<_>>(); | ||
441 | let mut located_imports = FxHashSet::default(); | 445 | let mut located_imports = FxHashSet::default(); |
442 | 446 | ||
443 | if trait_assoc_item { | 447 | if trait_assoc_item { |
@@ -454,10 +458,6 @@ fn trait_applicable_items( | |||
454 | } | 458 | } |
455 | } | 459 | } |
456 | let located_trait = assoc.containing_trait(db)?; | 460 | let located_trait = assoc.containing_trait(db)?; |
457 | if related_dyn_traits.contains(&located_trait) { | ||
458 | return None; | ||
459 | } | ||
460 | |||
461 | let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); | 461 | let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); |
462 | let original_item = assoc_to_item(assoc); | 462 | let original_item = assoc_to_item(assoc); |
463 | located_imports.insert(LocatedImport::new( | 463 | located_imports.insert(LocatedImport::new( |
@@ -480,9 +480,6 @@ fn trait_applicable_items( | |||
480 | let assoc = function.as_assoc_item(db)?; | 480 | let assoc = function.as_assoc_item(db)?; |
481 | if required_assoc_items.contains(&assoc) { | 481 | if required_assoc_items.contains(&assoc) { |
482 | let located_trait = assoc.containing_trait(db)?; | 482 | let located_trait = assoc.containing_trait(db)?; |
483 | if related_dyn_traits.contains(&located_trait) { | ||
484 | return None; | ||
485 | } | ||
486 | let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); | 483 | let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); |
487 | let original_item = assoc_to_item(assoc); | 484 | let original_item = assoc_to_item(assoc); |
488 | located_imports.insert(LocatedImport::new( | 485 | located_imports.insert(LocatedImport::new( |
diff --git a/crates/ide_db/src/traits/tests.rs b/crates/ide_db/src/traits/tests.rs index 2a5482024..de994407c 100644 --- a/crates/ide_db/src/traits/tests.rs +++ b/crates/ide_db/src/traits/tests.rs | |||
@@ -2,7 +2,6 @@ use base_db::{fixture::ChangeFixture, FilePosition}; | |||
2 | use expect_test::{expect, Expect}; | 2 | use expect_test::{expect, Expect}; |
3 | use hir::Semantics; | 3 | use hir::Semantics; |
4 | use syntax::ast::{self, AstNode}; | 4 | use syntax::ast::{self, AstNode}; |
5 | use test_utils::RangeOrOffset; | ||
6 | 5 | ||
7 | use crate::RootDatabase; | 6 | use crate::RootDatabase; |
8 | 7 | ||
@@ -12,10 +11,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) { | |||
12 | let mut database = RootDatabase::default(); | 11 | let mut database = RootDatabase::default(); |
13 | database.apply_change(change_fixture.change); | 12 | database.apply_change(change_fixture.change); |
14 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); | 13 | let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); |
15 | let offset = match range_or_offset { | 14 | let offset = range_or_offset.expect_offset(); |
16 | RangeOrOffset::Range(_) => panic!(), | ||
17 | RangeOrOffset::Offset(it) => it, | ||
18 | }; | ||
19 | (database, FilePosition { file_id, offset }) | 15 | (database, FilePosition { file_id, offset }) |
20 | } | 16 | } |
21 | 17 | ||
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 75d2f2eed..84ca3ff87 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs | |||
@@ -701,7 +701,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen | |||
701 | "path" => Path, | 701 | "path" => Path, |
702 | "expr" => Expr, | 702 | "expr" => Expr, |
703 | "ty" => Type, | 703 | "ty" => Type, |
704 | "pat" => Pattern, | 704 | "pat" | "pat_param" => Pattern, // FIXME: edition2021 |
705 | "stmt" => Statement, | 705 | "stmt" => Statement, |
706 | "block" => Block, | 706 | "block" => Block, |
707 | "meta" => MetaItem, | 707 | "meta" => MetaItem, |
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 3af5bc18b..b95374b76 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs | |||
@@ -14,6 +14,7 @@ mod tests; | |||
14 | 14 | ||
15 | #[cfg(test)] | 15 | #[cfg(test)] |
16 | mod benchmark; | 16 | mod benchmark; |
17 | mod token_map; | ||
17 | 18 | ||
18 | use std::fmt; | 19 | use std::fmt; |
19 | 20 | ||
@@ -63,9 +64,12 @@ impl fmt::Display for ExpandError { | |||
63 | } | 64 | } |
64 | } | 65 | } |
65 | 66 | ||
66 | pub use crate::syntax_bridge::{ | 67 | pub use crate::{ |
67 | ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, | 68 | syntax_bridge::{ |
68 | token_tree_to_syntax_node, TokenMap, | 69 | ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, |
70 | token_tree_to_syntax_node, | ||
71 | }, | ||
72 | token_map::TokenMap, | ||
69 | }; | 73 | }; |
70 | 74 | ||
71 | /// This struct contains AST for a single `macro_rules` definition. What might | 75 | /// This struct contains AST for a single `macro_rules` definition. What might |
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index b13168bd3..b11172caf 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs | |||
@@ -10,36 +10,8 @@ use syntax::{ | |||
10 | }; | 10 | }; |
11 | use tt::buffer::{Cursor, TokenBuffer}; | 11 | use tt::buffer::{Cursor, TokenBuffer}; |
12 | 12 | ||
13 | use crate::ExpandError; | ||
14 | use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter}; | 13 | use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter}; |
15 | 14 | use crate::{ExpandError, TokenMap}; | |
16 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | ||
17 | pub enum TokenTextRange { | ||
18 | Token(TextRange), | ||
19 | Delimiter(TextRange), | ||
20 | } | ||
21 | |||
22 | impl TokenTextRange { | ||
23 | pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { | ||
24 | match self { | ||
25 | TokenTextRange::Token(it) => Some(it), | ||
26 | TokenTextRange::Delimiter(it) => match kind { | ||
27 | T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), | ||
28 | T!['}'] | T![')'] | T![']'] => { | ||
29 | Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) | ||
30 | } | ||
31 | _ => None, | ||
32 | }, | ||
33 | } | ||
34 | } | ||
35 | } | ||
36 | |||
37 | /// Maps `tt::TokenId` to the relative range of the original token. | ||
38 | #[derive(Debug, PartialEq, Eq, Clone, Default)] | ||
39 | pub struct TokenMap { | ||
40 | /// Maps `tt::TokenId` to the *relative* source range. | ||
41 | entries: Vec<(tt::TokenId, TokenTextRange)>, | ||
42 | } | ||
43 | 15 | ||
44 | /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro | 16 | /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro |
45 | /// will consume). | 17 | /// will consume). |
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { | |||
53 | let global_offset = node.text_range().start(); | 25 | let global_offset = node.text_range().start(); |
54 | let mut c = Convertor::new(node, global_offset); | 26 | let mut c = Convertor::new(node, global_offset); |
55 | let subtree = c.go(); | 27 | let subtree = c.go(); |
56 | c.id_alloc.map.entries.shrink_to_fit(); | 28 | c.id_alloc.map.shrink_to_fit(); |
57 | (subtree, c.id_alloc.map) | 29 | (subtree, c.id_alloc.map) |
58 | } | 30 | } |
59 | 31 | ||
@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> { | |||
149 | res | 121 | res |
150 | } | 122 | } |
151 | 123 | ||
152 | impl TokenMap { | ||
153 | pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { | ||
154 | let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { | ||
155 | TokenTextRange::Token(it) => *it == relative_range, | ||
156 | TokenTextRange::Delimiter(it) => { | ||
157 | let open = TextRange::at(it.start(), 1.into()); | ||
158 | let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); | ||
159 | open == relative_range || close == relative_range | ||
160 | } | ||
161 | })?; | ||
162 | Some(token_id) | ||
163 | } | ||
164 | |||
165 | pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> { | ||
166 | let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; | ||
167 | Some(range) | ||
168 | } | ||
169 | |||
170 | fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { | ||
171 | self.entries.push((token_id, TokenTextRange::Token(relative_range))); | ||
172 | } | ||
173 | |||
174 | fn insert_delim( | ||
175 | &mut self, | ||
176 | token_id: tt::TokenId, | ||
177 | open_relative_range: TextRange, | ||
178 | close_relative_range: TextRange, | ||
179 | ) -> usize { | ||
180 | let res = self.entries.len(); | ||
181 | let cover = open_relative_range.cover(close_relative_range); | ||
182 | |||
183 | self.entries.push((token_id, TokenTextRange::Delimiter(cover))); | ||
184 | res | ||
185 | } | ||
186 | |||
187 | fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { | ||
188 | let (_, token_text_range) = &mut self.entries[idx]; | ||
189 | if let TokenTextRange::Delimiter(dim) = token_text_range { | ||
190 | let cover = dim.cover(close_relative_range); | ||
191 | *token_text_range = TokenTextRange::Delimiter(cover); | ||
192 | } | ||
193 | } | ||
194 | |||
195 | fn remove_delim(&mut self, idx: usize) { | ||
196 | // FIXME: This could be accidentally quadratic | ||
197 | self.entries.remove(idx); | ||
198 | } | ||
199 | } | ||
200 | |||
201 | /// Returns the textual content of a doc comment block as a quoted string | 124 | /// Returns the textual content of a doc comment block as a quoted string |
202 | /// That is, strips leading `///` (or `/**`, etc) | 125 | /// That is, strips leading `///` (or `/**`, etc) |
203 | /// and strips the ending `*/` | 126 | /// and strips the ending `*/` |
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> { | |||
634 | } | 557 | } |
635 | 558 | ||
636 | fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) { | 559 | fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) { |
637 | self.token_map.entries.shrink_to_fit(); | 560 | self.token_map.shrink_to_fit(); |
638 | (self.inner.finish(), self.token_map) | 561 | (self.inner.finish(), self.token_map) |
639 | } | 562 | } |
640 | } | 563 | } |
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs index 3a1d840ea..5f173f513 100644 --- a/crates/mbe/src/tests/expand.rs +++ b/crates/mbe/src/tests/expand.rs | |||
@@ -58,9 +58,8 @@ macro_rules! foobar { | |||
58 | let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); | 58 | let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); |
59 | let content = node.syntax_node().to_string(); | 59 | let content = node.syntax_node().to_string(); |
60 | 60 | ||
61 | let get_text = |id, kind| -> String { | 61 | let get_text = |
62 | content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string() | 62 | |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() }; |
63 | }; | ||
64 | 63 | ||
65 | assert_eq!(expanded.token_trees.len(), 4); | 64 | assert_eq!(expanded.token_trees.len(), 4); |
66 | // {($e:ident) => { fn $e() {} }} | 65 | // {($e:ident) => { fn $e() {} }} |
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs new file mode 100644 index 000000000..6df3de3b3 --- /dev/null +++ b/crates/mbe/src/token_map.rs | |||
@@ -0,0 +1,85 @@ | |||
1 | //! Mapping between `TokenId`s and the token's position in macro definitions or inputs. | ||
2 | |||
3 | use parser::{SyntaxKind, T}; | ||
4 | use syntax::{TextRange, TextSize}; | ||
5 | |||
6 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | ||
7 | enum TokenTextRange { | ||
8 | Token(TextRange), | ||
9 | Delimiter(TextRange), | ||
10 | } | ||
11 | |||
12 | impl TokenTextRange { | ||
13 | fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { | ||
14 | match self { | ||
15 | TokenTextRange::Token(it) => Some(it), | ||
16 | TokenTextRange::Delimiter(it) => match kind { | ||
17 | T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), | ||
18 | T!['}'] | T![')'] | T![']'] => { | ||
19 | Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) | ||
20 | } | ||
21 | _ => None, | ||
22 | }, | ||
23 | } | ||
24 | } | ||
25 | } | ||
26 | |||
27 | /// Maps `tt::TokenId` to the relative range of the original token. | ||
28 | #[derive(Debug, PartialEq, Eq, Clone, Default)] | ||
29 | pub struct TokenMap { | ||
30 | /// Maps `tt::TokenId` to the *relative* source range. | ||
31 | entries: Vec<(tt::TokenId, TokenTextRange)>, | ||
32 | } | ||
33 | |||
34 | impl TokenMap { | ||
35 | pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { | ||
36 | let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { | ||
37 | TokenTextRange::Token(it) => *it == relative_range, | ||
38 | TokenTextRange::Delimiter(it) => { | ||
39 | let open = TextRange::at(it.start(), 1.into()); | ||
40 | let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); | ||
41 | open == relative_range || close == relative_range | ||
42 | } | ||
43 | })?; | ||
44 | Some(token_id) | ||
45 | } | ||
46 | |||
47 | pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> { | ||
48 | let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; | ||
49 | range.by_kind(kind) | ||
50 | } | ||
51 | |||
52 | pub(crate) fn shrink_to_fit(&mut self) { | ||
53 | self.entries.shrink_to_fit(); | ||
54 | } | ||
55 | |||
56 | pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { | ||
57 | self.entries.push((token_id, TokenTextRange::Token(relative_range))); | ||
58 | } | ||
59 | |||
60 | pub(crate) fn insert_delim( | ||
61 | &mut self, | ||
62 | token_id: tt::TokenId, | ||
63 | open_relative_range: TextRange, | ||
64 | close_relative_range: TextRange, | ||
65 | ) -> usize { | ||
66 | let res = self.entries.len(); | ||
67 | let cover = open_relative_range.cover(close_relative_range); | ||
68 | |||
69 | self.entries.push((token_id, TokenTextRange::Delimiter(cover))); | ||
70 | res | ||
71 | } | ||
72 | |||
73 | pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { | ||
74 | let (_, token_text_range) = &mut self.entries[idx]; | ||
75 | if let TokenTextRange::Delimiter(dim) = token_text_range { | ||
76 | let cover = dim.cover(close_relative_range); | ||
77 | *token_text_range = TokenTextRange::Delimiter(cover); | ||
78 | } | ||
79 | } | ||
80 | |||
81 | pub(crate) fn remove_delim(&mut self, idx: usize) { | ||
82 | // FIXME: This could be accidentally quadratic | ||
83 | self.entries.remove(idx); | ||
84 | } | ||
85 | } | ||
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs index 3aa546980..33a4f8168 100644 --- a/crates/project_model/src/build_data.rs +++ b/crates/project_model/src/build_data.rs | |||
@@ -214,7 +214,7 @@ impl WorkspaceBuildData { | |||
214 | acc | 214 | acc |
215 | }; | 215 | }; |
216 | let package_build_data = | 216 | let package_build_data = |
217 | res.per_package.entry(package_id.repr.clone()).or_default(); | 217 | res.per_package.entry(package_id.repr).or_default(); |
218 | // cargo_metadata crate returns default (empty) path for | 218 | // cargo_metadata crate returns default (empty) path for |
219 | // older cargos, which is not absolute, so work around that. | 219 | // older cargos, which is not absolute, so work around that. |
220 | if !out_dir.as_str().is_empty() { | 220 | if !out_dir.as_str().is_empty() { |
@@ -237,13 +237,13 @@ impl WorkspaceBuildData { | |||
237 | { | 237 | { |
238 | let filename = AbsPathBuf::assert(PathBuf::from(&filename)); | 238 | let filename = AbsPathBuf::assert(PathBuf::from(&filename)); |
239 | let package_build_data = | 239 | let package_build_data = |
240 | res.per_package.entry(package_id.repr.clone()).or_default(); | 240 | res.per_package.entry(package_id.repr).or_default(); |
241 | package_build_data.proc_macro_dylib_path = Some(filename); | 241 | package_build_data.proc_macro_dylib_path = Some(filename); |
242 | } | 242 | } |
243 | } | 243 | } |
244 | } | 244 | } |
245 | Message::CompilerMessage(message) => { | 245 | Message::CompilerMessage(message) => { |
246 | progress(message.target.name.clone()); | 246 | progress(message.target.name); |
247 | } | 247 | } |
248 | Message::BuildFinished(_) => {} | 248 | Message::BuildFinished(_) => {} |
249 | Message::TextLine(_) => {} | 249 | Message::TextLine(_) => {} |
diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs index ad705c752..b8ad08364 100644 --- a/crates/project_model/src/cargo_workspace.rs +++ b/crates/project_model/src/cargo_workspace.rs | |||
@@ -121,7 +121,7 @@ pub struct PackageDependency { | |||
121 | pub kind: DepKind, | 121 | pub kind: DepKind, |
122 | } | 122 | } |
123 | 123 | ||
124 | #[derive(Debug, Clone, Eq, PartialEq)] | 124 | #[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)] |
125 | pub enum DepKind { | 125 | pub enum DepKind { |
126 | /// Available to the library, binary, and dev targets in the package (but not the build script). | 126 | /// Available to the library, binary, and dev targets in the package (but not the build script). |
127 | Normal, | 127 | Normal, |
@@ -132,17 +132,23 @@ pub enum DepKind { | |||
132 | } | 132 | } |
133 | 133 | ||
134 | impl DepKind { | 134 | impl DepKind { |
135 | fn new(list: &[cargo_metadata::DepKindInfo]) -> Self { | 135 | fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ { |
136 | let mut dep_kinds = Vec::new(); | ||
137 | if list.is_empty() { | ||
138 | dep_kinds.push(Self::Normal); | ||
139 | } | ||
136 | for info in list { | 140 | for info in list { |
137 | match info.kind { | 141 | let kind = match info.kind { |
138 | cargo_metadata::DependencyKind::Normal => return Self::Normal, | 142 | cargo_metadata::DependencyKind::Normal => Self::Normal, |
139 | cargo_metadata::DependencyKind::Development => return Self::Dev, | 143 | cargo_metadata::DependencyKind::Development => Self::Dev, |
140 | cargo_metadata::DependencyKind::Build => return Self::Build, | 144 | cargo_metadata::DependencyKind::Build => Self::Build, |
141 | cargo_metadata::DependencyKind::Unknown => continue, | 145 | cargo_metadata::DependencyKind::Unknown => continue, |
142 | } | 146 | }; |
147 | dep_kinds.push(kind); | ||
143 | } | 148 | } |
144 | 149 | dep_kinds.sort_unstable(); | |
145 | Self::Normal | 150 | dep_kinds.dedup(); |
151 | dep_kinds.into_iter() | ||
146 | } | 152 | } |
147 | } | 153 | } |
148 | 154 | ||
@@ -317,7 +323,11 @@ impl CargoWorkspace { | |||
317 | } | 323 | } |
318 | }; | 324 | }; |
319 | node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); | 325 | node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); |
320 | for dep_node in node.deps { | 326 | for (dep_node, kind) in node |
327 | .deps | ||
328 | .iter() | ||
329 | .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind))) | ||
330 | { | ||
321 | let pkg = match pkg_by_id.get(&dep_node.pkg) { | 331 | let pkg = match pkg_by_id.get(&dep_node.pkg) { |
322 | Some(&pkg) => pkg, | 332 | Some(&pkg) => pkg, |
323 | None => { | 333 | None => { |
@@ -328,11 +338,7 @@ impl CargoWorkspace { | |||
328 | continue; | 338 | continue; |
329 | } | 339 | } |
330 | }; | 340 | }; |
331 | let dep = PackageDependency { | 341 | let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind }; |
332 | name: dep_node.name, | ||
333 | pkg, | ||
334 | kind: DepKind::new(&dep_node.dep_kinds), | ||
335 | }; | ||
336 | packages[source].dependencies.push(dep); | 342 | packages[source].dependencies.push(dep); |
337 | } | 343 | } |
338 | packages[source].active_features.extend(node.features); | 344 | packages[source].active_features.extend(node.features); |
@@ -340,11 +346,8 @@ impl CargoWorkspace { | |||
340 | 346 | ||
341 | let workspace_root = | 347 | let workspace_root = |
342 | AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string())); | 348 | AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string())); |
343 | let build_data_config = BuildDataConfig::new( | 349 | let build_data_config = |
344 | cargo_toml.to_path_buf(), | 350 | BuildDataConfig::new(cargo_toml.to_path_buf(), config.clone(), Arc::new(meta.packages)); |
345 | config.clone(), | ||
346 | Arc::new(meta.packages.clone()), | ||
347 | ); | ||
348 | 351 | ||
349 | Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config }) | 352 | Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config }) |
350 | } | 353 | } |
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 6c883dd58..2b842d393 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs | |||
@@ -158,7 +158,24 @@ fn run_server() -> Result<()> { | |||
158 | let initialize_params = | 158 | let initialize_params = |
159 | from_json::<lsp_types::InitializeParams>("InitializeParams", initialize_params)?; | 159 | from_json::<lsp_types::InitializeParams>("InitializeParams", initialize_params)?; |
160 | 160 | ||
161 | let server_capabilities = rust_analyzer::server_capabilities(&initialize_params.capabilities); | 161 | let root_path = match initialize_params |
162 | .root_uri | ||
163 | .and_then(|it| it.to_file_path().ok()) | ||
164 | .and_then(|it| AbsPathBuf::try_from(it).ok()) | ||
165 | { | ||
166 | Some(it) => it, | ||
167 | None => { | ||
168 | let cwd = env::current_dir()?; | ||
169 | AbsPathBuf::assert(cwd) | ||
170 | } | ||
171 | }; | ||
172 | |||
173 | let mut config = Config::new(root_path, initialize_params.capabilities); | ||
174 | if let Some(json) = initialize_params.initialization_options { | ||
175 | config.update(json); | ||
176 | } | ||
177 | |||
178 | let server_capabilities = rust_analyzer::server_capabilities(&config); | ||
162 | 179 | ||
163 | let initialize_result = lsp_types::InitializeResult { | 180 | let initialize_result = lsp_types::InitializeResult { |
164 | capabilities: server_capabilities, | 181 | capabilities: server_capabilities, |
@@ -166,11 +183,7 @@ fn run_server() -> Result<()> { | |||
166 | name: String::from("rust-analyzer"), | 183 | name: String::from("rust-analyzer"), |
167 | version: Some(String::from(env!("REV"))), | 184 | version: Some(String::from(env!("REV"))), |
168 | }), | 185 | }), |
169 | offset_encoding: if supports_utf8(&initialize_params.capabilities) { | 186 | offset_encoding: if supports_utf8(&config.caps) { Some("utf-8".to_string()) } else { None }, |
170 | Some("utf-8".to_string()) | ||
171 | } else { | ||
172 | None | ||
173 | }, | ||
174 | }; | 187 | }; |
175 | 188 | ||
176 | let initialize_result = serde_json::to_value(initialize_result).unwrap(); | 189 | let initialize_result = serde_json::to_value(initialize_result).unwrap(); |
@@ -181,47 +194,26 @@ fn run_server() -> Result<()> { | |||
181 | log::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default()); | 194 | log::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default()); |
182 | } | 195 | } |
183 | 196 | ||
184 | let config = { | 197 | if config.linked_projects().is_empty() && config.detached_files().is_empty() { |
185 | let root_path = match initialize_params | 198 | let workspace_roots = initialize_params |
186 | .root_uri | 199 | .workspace_folders |
187 | .and_then(|it| it.to_file_path().ok()) | 200 | .map(|workspaces| { |
188 | .and_then(|it| AbsPathBuf::try_from(it).ok()) | 201 | workspaces |
189 | { | 202 | .into_iter() |
190 | Some(it) => it, | 203 | .filter_map(|it| it.uri.to_file_path().ok()) |
191 | None => { | 204 | .filter_map(|it| AbsPathBuf::try_from(it).ok()) |
192 | let cwd = env::current_dir()?; | 205 | .collect::<Vec<_>>() |
193 | AbsPathBuf::assert(cwd) | 206 | }) |
194 | } | 207 | .filter(|workspaces| !workspaces.is_empty()) |
195 | }; | 208 | .unwrap_or_else(|| vec![config.root_path.clone()]); |
196 | 209 | ||
197 | let mut config = Config::new(root_path, initialize_params.capabilities); | 210 | let discovered = ProjectManifest::discover_all(&workspace_roots); |
198 | if let Some(json) = initialize_params.initialization_options { | 211 | log::info!("discovered projects: {:?}", discovered); |
199 | config.update(json); | 212 | if discovered.is_empty() { |
200 | } | 213 | log::error!("failed to find any projects in {:?}", workspace_roots); |
201 | |||
202 | if config.linked_projects().is_empty() && config.detached_files().is_empty() { | ||
203 | let workspace_roots = initialize_params | ||
204 | .workspace_folders | ||
205 | .map(|workspaces| { | ||
206 | workspaces | ||
207 | .into_iter() | ||
208 | .filter_map(|it| it.uri.to_file_path().ok()) | ||
209 | .filter_map(|it| AbsPathBuf::try_from(it).ok()) | ||
210 | .collect::<Vec<_>>() | ||
211 | }) | ||
212 | .filter(|workspaces| !workspaces.is_empty()) | ||
213 | .unwrap_or_else(|| vec![config.root_path.clone()]); | ||
214 | |||
215 | let discovered = ProjectManifest::discover_all(&workspace_roots); | ||
216 | log::info!("discovered projects: {:?}", discovered); | ||
217 | if discovered.is_empty() { | ||
218 | log::error!("failed to find any projects in {:?}", workspace_roots); | ||
219 | } | ||
220 | config.discovered_projects = Some(discovered); | ||
221 | } | 214 | } |
222 | 215 | config.discovered_projects = Some(discovered); | |
223 | config | 216 | } |
224 | }; | ||
225 | 217 | ||
226 | rust_analyzer::main_loop(config, connection)?; | 218 | rust_analyzer::main_loop(config, connection)?; |
227 | 219 | ||
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index b2317618a..fe5255240 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs | |||
@@ -1,6 +1,4 @@ | |||
1 | //! Advertizes the capabilities of the LSP Server. | 1 | //! Advertises the capabilities of the LSP Server. |
2 | use std::env; | ||
3 | |||
4 | use lsp_types::{ | 2 | use lsp_types::{ |
5 | CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions, | 3 | CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions, |
6 | CodeActionProviderCapability, CodeLensOptions, CompletionOptions, | 4 | CodeActionProviderCapability, CodeLensOptions, CompletionOptions, |
@@ -15,24 +13,21 @@ use lsp_types::{ | |||
15 | }; | 13 | }; |
16 | use serde_json::json; | 14 | use serde_json::json; |
17 | 15 | ||
16 | use crate::config::{Config, RustfmtConfig}; | ||
18 | use crate::semantic_tokens; | 17 | use crate::semantic_tokens; |
19 | 18 | ||
20 | pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabilities { | 19 | pub fn server_capabilities(config: &Config) -> ServerCapabilities { |
21 | ServerCapabilities { | 20 | ServerCapabilities { |
22 | text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { | 21 | text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { |
23 | open_close: Some(true), | 22 | open_close: Some(true), |
24 | change: Some(if env::var("RA_NO_INCREMENTAL_SYNC").is_ok() { | 23 | change: Some(TextDocumentSyncKind::Incremental), |
25 | TextDocumentSyncKind::Full | ||
26 | } else { | ||
27 | TextDocumentSyncKind::Incremental | ||
28 | }), | ||
29 | will_save: None, | 24 | will_save: None, |
30 | will_save_wait_until: None, | 25 | will_save_wait_until: None, |
31 | save: Some(SaveOptions::default().into()), | 26 | save: Some(SaveOptions::default().into()), |
32 | })), | 27 | })), |
33 | hover_provider: Some(HoverProviderCapability::Simple(true)), | 28 | hover_provider: Some(HoverProviderCapability::Simple(true)), |
34 | completion_provider: Some(CompletionOptions { | 29 | completion_provider: Some(CompletionOptions { |
35 | resolve_provider: completions_resolve_provider(client_caps), | 30 | resolve_provider: completions_resolve_provider(&config.caps), |
36 | trigger_characters: Some(vec![":".to_string(), ".".to_string(), "'".to_string()]), | 31 | trigger_characters: Some(vec![":".to_string(), ".".to_string(), "'".to_string()]), |
37 | all_commit_characters: None, | 32 | all_commit_characters: None, |
38 | completion_item: None, | 33 | completion_item: None, |
@@ -51,10 +46,13 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti | |||
51 | document_highlight_provider: Some(OneOf::Left(true)), | 46 | document_highlight_provider: Some(OneOf::Left(true)), |
52 | document_symbol_provider: Some(OneOf::Left(true)), | 47 | document_symbol_provider: Some(OneOf::Left(true)), |
53 | workspace_symbol_provider: Some(OneOf::Left(true)), | 48 | workspace_symbol_provider: Some(OneOf::Left(true)), |
54 | code_action_provider: Some(code_action_capabilities(client_caps)), | 49 | code_action_provider: Some(code_action_capabilities(&config.caps)), |
55 | code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), | 50 | code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), |
56 | document_formatting_provider: Some(OneOf::Left(true)), | 51 | document_formatting_provider: Some(OneOf::Left(true)), |
57 | document_range_formatting_provider: None, | 52 | document_range_formatting_provider: match config.rustfmt() { |
53 | RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } => Some(OneOf::Left(true)), | ||
54 | _ => Some(OneOf::Left(false)), | ||
55 | }, | ||
58 | document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { | 56 | document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { |
59 | first_trigger_character: "=".to_string(), | 57 | first_trigger_character: "=".to_string(), |
60 | more_trigger_character: Some(vec![".".to_string(), ">".to_string(), "{".to_string()]), | 58 | more_trigger_character: Some(vec![".".to_string(), ">".to_string(), "{".to_string()]), |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 7c02a507c..a67b0bb25 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -111,15 +111,15 @@ config_data! { | |||
111 | /// Map of prefixes to be substituted when parsing diagnostic file paths. | 111 | /// Map of prefixes to be substituted when parsing diagnostic file paths. |
112 | /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. | 112 | /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`. |
113 | diagnostics_remapPrefix: FxHashMap<String, String> = "{}", | 113 | diagnostics_remapPrefix: FxHashMap<String, String> = "{}", |
114 | /// List of warnings that should be displayed with info severity. | ||
115 | /// | ||
116 | /// The warnings will be indicated by a blue squiggly underline in code | ||
117 | /// and a blue icon in the `Problems Panel`. | ||
118 | diagnostics_warningsAsHint: Vec<String> = "[]", | ||
119 | /// List of warnings that should be displayed with hint severity. | 114 | /// List of warnings that should be displayed with hint severity. |
120 | /// | 115 | /// |
121 | /// The warnings will be indicated by faded text or three dots in code | 116 | /// The warnings will be indicated by faded text or three dots in code |
122 | /// and will not show up in the `Problems Panel`. | 117 | /// and will not show up in the `Problems Panel`. |
118 | diagnostics_warningsAsHint: Vec<String> = "[]", | ||
119 | /// List of warnings that should be displayed with info severity. | ||
120 | /// | ||
121 | /// The warnings will be indicated by a blue squiggly underline in code | ||
122 | /// and a blue icon in the `Problems Panel`. | ||
123 | diagnostics_warningsAsInfo: Vec<String> = "[]", | 123 | diagnostics_warningsAsInfo: Vec<String> = "[]", |
124 | 124 | ||
125 | /// Controls file watching implementation. | 125 | /// Controls file watching implementation. |
@@ -218,6 +218,10 @@ config_data! { | |||
218 | /// Advanced option, fully override the command rust-analyzer uses for | 218 | /// Advanced option, fully override the command rust-analyzer uses for |
219 | /// formatting. | 219 | /// formatting. |
220 | rustfmt_overrideCommand: Option<Vec<String>> = "null", | 220 | rustfmt_overrideCommand: Option<Vec<String>> = "null", |
221 | /// Enables the use of rustfmt's unstable range formatting command for the | ||
222 | /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only | ||
223 | /// available on a nightly build. | ||
224 | rustfmt_enableRangeFormatting: bool = "false", | ||
221 | 225 | ||
222 | /// Workspace symbol search scope. | 226 | /// Workspace symbol search scope. |
223 | workspace_symbol_search_scope: WorskpaceSymbolSearchScopeDef = "\"workspace\"", | 227 | workspace_symbol_search_scope: WorskpaceSymbolSearchScopeDef = "\"workspace\"", |
@@ -234,7 +238,7 @@ impl Default for ConfigData { | |||
234 | 238 | ||
235 | #[derive(Debug, Clone)] | 239 | #[derive(Debug, Clone)] |
236 | pub struct Config { | 240 | pub struct Config { |
237 | caps: lsp_types::ClientCapabilities, | 241 | pub caps: lsp_types::ClientCapabilities, |
238 | data: ConfigData, | 242 | data: ConfigData, |
239 | detached_files: Vec<AbsPathBuf>, | 243 | detached_files: Vec<AbsPathBuf>, |
240 | pub discovered_projects: Option<Vec<ProjectManifest>>, | 244 | pub discovered_projects: Option<Vec<ProjectManifest>>, |
@@ -305,7 +309,7 @@ pub struct NotificationsConfig { | |||
305 | 309 | ||
306 | #[derive(Debug, Clone)] | 310 | #[derive(Debug, Clone)] |
307 | pub enum RustfmtConfig { | 311 | pub enum RustfmtConfig { |
308 | Rustfmt { extra_args: Vec<String> }, | 312 | Rustfmt { extra_args: Vec<String>, enable_range_formatting: bool }, |
309 | CustomCommand { command: String, args: Vec<String> }, | 313 | CustomCommand { command: String, args: Vec<String> }, |
310 | } | 314 | } |
311 | 315 | ||
@@ -584,9 +588,10 @@ impl Config { | |||
584 | let command = args.remove(0); | 588 | let command = args.remove(0); |
585 | RustfmtConfig::CustomCommand { command, args } | 589 | RustfmtConfig::CustomCommand { command, args } |
586 | } | 590 | } |
587 | Some(_) | None => { | 591 | Some(_) | None => RustfmtConfig::Rustfmt { |
588 | RustfmtConfig::Rustfmt { extra_args: self.data.rustfmt_extraArgs.clone() } | 592 | extra_args: self.data.rustfmt_extraArgs.clone(), |
589 | } | 593 | enable_range_formatting: self.data.rustfmt_enableRangeFormatting, |
594 | }, | ||
590 | } | 595 | } |
591 | } | 596 | } |
592 | pub fn flycheck(&self) -> Option<FlycheckConfig> { | 597 | pub fn flycheck(&self) -> Option<FlycheckConfig> { |
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index f48210424..456744603 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs | |||
@@ -27,7 +27,7 @@ use lsp_types::{ | |||
27 | }; | 27 | }; |
28 | use project_model::TargetKind; | 28 | use project_model::TargetKind; |
29 | use serde::{Deserialize, Serialize}; | 29 | use serde::{Deserialize, Serialize}; |
30 | use serde_json::to_value; | 30 | use serde_json::{json, to_value}; |
31 | use stdx::format_to; | 31 | use stdx::format_to; |
32 | use syntax::{algo, ast, AstNode, TextRange, TextSize}; | 32 | use syntax::{algo, ast, AstNode, TextRange, TextSize}; |
33 | 33 | ||
@@ -955,104 +955,17 @@ pub(crate) fn handle_formatting( | |||
955 | params: DocumentFormattingParams, | 955 | params: DocumentFormattingParams, |
956 | ) -> Result<Option<Vec<lsp_types::TextEdit>>> { | 956 | ) -> Result<Option<Vec<lsp_types::TextEdit>>> { |
957 | let _p = profile::span("handle_formatting"); | 957 | let _p = profile::span("handle_formatting"); |
958 | let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; | ||
959 | let file = snap.analysis.file_text(file_id)?; | ||
960 | let crate_ids = snap.analysis.crate_for(file_id)?; | ||
961 | |||
962 | let line_index = snap.file_line_index(file_id)?; | ||
963 | |||
964 | let mut rustfmt = match snap.config.rustfmt() { | ||
965 | RustfmtConfig::Rustfmt { extra_args } => { | ||
966 | let mut cmd = process::Command::new(toolchain::rustfmt()); | ||
967 | cmd.args(extra_args); | ||
968 | // try to chdir to the file so we can respect `rustfmt.toml` | ||
969 | // FIXME: use `rustfmt --config-path` once | ||
970 | // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed | ||
971 | match params.text_document.uri.to_file_path() { | ||
972 | Ok(mut path) => { | ||
973 | // pop off file name | ||
974 | if path.pop() && path.is_dir() { | ||
975 | cmd.current_dir(path); | ||
976 | } | ||
977 | } | ||
978 | Err(_) => { | ||
979 | log::error!( | ||
980 | "Unable to get file path for {}, rustfmt.toml might be ignored", | ||
981 | params.text_document.uri | ||
982 | ); | ||
983 | } | ||
984 | } | ||
985 | if let Some(&crate_id) = crate_ids.first() { | ||
986 | // Assume all crates are in the same edition | ||
987 | let edition = snap.analysis.crate_edition(crate_id)?; | ||
988 | cmd.arg("--edition"); | ||
989 | cmd.arg(edition.to_string()); | ||
990 | } | ||
991 | cmd | ||
992 | } | ||
993 | RustfmtConfig::CustomCommand { command, args } => { | ||
994 | let mut cmd = process::Command::new(command); | ||
995 | cmd.args(args); | ||
996 | cmd | ||
997 | } | ||
998 | }; | ||
999 | 958 | ||
1000 | let mut rustfmt = | 959 | run_rustfmt(&snap, params.text_document, None) |
1001 | rustfmt.stdin(Stdio::piped()).stdout(Stdio::piped()).stderr(Stdio::piped()).spawn()?; | 960 | } |
1002 | |||
1003 | rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; | ||
1004 | |||
1005 | let output = rustfmt.wait_with_output()?; | ||
1006 | let captured_stdout = String::from_utf8(output.stdout)?; | ||
1007 | let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default(); | ||
1008 | |||
1009 | if !output.status.success() { | ||
1010 | let rustfmt_not_installed = | ||
1011 | captured_stderr.contains("not installed") || captured_stderr.contains("not available"); | ||
1012 | |||
1013 | return match output.status.code() { | ||
1014 | Some(1) if !rustfmt_not_installed => { | ||
1015 | // While `rustfmt` doesn't have a specific exit code for parse errors this is the | ||
1016 | // likely cause exiting with 1. Most Language Servers swallow parse errors on | ||
1017 | // formatting because otherwise an error is surfaced to the user on top of the | ||
1018 | // syntax error diagnostics they're already receiving. This is especially jarring | ||
1019 | // if they have format on save enabled. | ||
1020 | log::info!("rustfmt exited with status 1, assuming parse error and ignoring"); | ||
1021 | Ok(None) | ||
1022 | } | ||
1023 | _ => { | ||
1024 | // Something else happened - e.g. `rustfmt` is missing or caught a signal | ||
1025 | Err(LspError::new( | ||
1026 | -32900, | ||
1027 | format!( | ||
1028 | r#"rustfmt exited with: | ||
1029 | Status: {} | ||
1030 | stdout: {} | ||
1031 | stderr: {}"#, | ||
1032 | output.status, captured_stdout, captured_stderr, | ||
1033 | ), | ||
1034 | ) | ||
1035 | .into()) | ||
1036 | } | ||
1037 | }; | ||
1038 | } | ||
1039 | 961 | ||
1040 | let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout); | 962 | pub(crate) fn handle_range_formatting( |
963 | snap: GlobalStateSnapshot, | ||
964 | params: lsp_types::DocumentRangeFormattingParams, | ||
965 | ) -> Result<Option<Vec<lsp_types::TextEdit>>> { | ||
966 | let _p = profile::span("handle_range_formatting"); | ||
1041 | 967 | ||
1042 | if line_index.endings != new_line_endings { | 968 | run_rustfmt(&snap, params.text_document, Some(params.range)) |
1043 | // If line endings are different, send the entire file. | ||
1044 | // Diffing would not work here, as the line endings might be the only | ||
1045 | // difference. | ||
1046 | Ok(Some(to_proto::text_edit_vec( | ||
1047 | &line_index, | ||
1048 | TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text), | ||
1049 | ))) | ||
1050 | } else if *file == new_text { | ||
1051 | // The document is already formatted correctly -- no edits needed. | ||
1052 | Ok(None) | ||
1053 | } else { | ||
1054 | Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text)))) | ||
1055 | } | ||
1056 | } | 969 | } |
1057 | 970 | ||
1058 | pub(crate) fn handle_code_action( | 971 | pub(crate) fn handle_code_action( |
@@ -1675,6 +1588,140 @@ fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) | |||
1675 | } | 1588 | } |
1676 | } | 1589 | } |
1677 | 1590 | ||
1591 | fn run_rustfmt( | ||
1592 | snap: &GlobalStateSnapshot, | ||
1593 | text_document: TextDocumentIdentifier, | ||
1594 | range: Option<lsp_types::Range>, | ||
1595 | ) -> Result<Option<Vec<lsp_types::TextEdit>>> { | ||
1596 | let file_id = from_proto::file_id(&snap, &text_document.uri)?; | ||
1597 | let file = snap.analysis.file_text(file_id)?; | ||
1598 | let crate_ids = snap.analysis.crate_for(file_id)?; | ||
1599 | |||
1600 | let line_index = snap.file_line_index(file_id)?; | ||
1601 | |||
1602 | let mut rustfmt = match snap.config.rustfmt() { | ||
1603 | RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { | ||
1604 | let mut cmd = process::Command::new(toolchain::rustfmt()); | ||
1605 | cmd.args(extra_args); | ||
1606 | // try to chdir to the file so we can respect `rustfmt.toml` | ||
1607 | // FIXME: use `rustfmt --config-path` once | ||
1608 | // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed | ||
1609 | match text_document.uri.to_file_path() { | ||
1610 | Ok(mut path) => { | ||
1611 | // pop off file name | ||
1612 | if path.pop() && path.is_dir() { | ||
1613 | cmd.current_dir(path); | ||
1614 | } | ||
1615 | } | ||
1616 | Err(_) => { | ||
1617 | log::error!( | ||
1618 | "Unable to get file path for {}, rustfmt.toml might be ignored", | ||
1619 | text_document.uri | ||
1620 | ); | ||
1621 | } | ||
1622 | } | ||
1623 | if let Some(&crate_id) = crate_ids.first() { | ||
1624 | // Assume all crates are in the same edition | ||
1625 | let edition = snap.analysis.crate_edition(crate_id)?; | ||
1626 | cmd.arg("--edition"); | ||
1627 | cmd.arg(edition.to_string()); | ||
1628 | } | ||
1629 | |||
1630 | if let Some(range) = range { | ||
1631 | if !enable_range_formatting { | ||
1632 | return Err(LspError::new( | ||
1633 | ErrorCode::InvalidRequest as i32, | ||
1634 | String::from( | ||
1635 | "rustfmt range formatting is unstable. \ | ||
1636 | Opt-in by using a nightly build of rustfmt and setting \ | ||
1637 | `rustfmt.enableRangeFormatting` to true in your LSP configuration", | ||
1638 | ), | ||
1639 | ) | ||
1640 | .into()); | ||
1641 | } | ||
1642 | |||
1643 | let frange = from_proto::file_range(&snap, text_document.clone(), range)?; | ||
1644 | let start_line = line_index.index.line_col(frange.range.start()).line; | ||
1645 | let end_line = line_index.index.line_col(frange.range.end()).line; | ||
1646 | |||
1647 | cmd.arg("--unstable-features"); | ||
1648 | cmd.arg("--file-lines"); | ||
1649 | cmd.arg( | ||
1650 | json!([{ | ||
1651 | "file": "stdin", | ||
1652 | "range": [start_line, end_line] | ||
1653 | }]) | ||
1654 | .to_string(), | ||
1655 | ); | ||
1656 | } | ||
1657 | |||
1658 | cmd | ||
1659 | } | ||
1660 | RustfmtConfig::CustomCommand { command, args } => { | ||
1661 | let mut cmd = process::Command::new(command); | ||
1662 | cmd.args(args); | ||
1663 | cmd | ||
1664 | } | ||
1665 | }; | ||
1666 | |||
1667 | let mut rustfmt = | ||
1668 | rustfmt.stdin(Stdio::piped()).stdout(Stdio::piped()).stderr(Stdio::piped()).spawn()?; | ||
1669 | |||
1670 | rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; | ||
1671 | |||
1672 | let output = rustfmt.wait_with_output()?; | ||
1673 | let captured_stdout = String::from_utf8(output.stdout)?; | ||
1674 | let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default(); | ||
1675 | |||
1676 | if !output.status.success() { | ||
1677 | let rustfmt_not_installed = | ||
1678 | captured_stderr.contains("not installed") || captured_stderr.contains("not available"); | ||
1679 | |||
1680 | return match output.status.code() { | ||
1681 | Some(1) if !rustfmt_not_installed => { | ||
1682 | // While `rustfmt` doesn't have a specific exit code for parse errors this is the | ||
1683 | // likely cause exiting with 1. Most Language Servers swallow parse errors on | ||
1684 | // formatting because otherwise an error is surfaced to the user on top of the | ||
1685 | // syntax error diagnostics they're already receiving. This is especially jarring | ||
1686 | // if they have format on save enabled. | ||
1687 | log::info!("rustfmt exited with status 1, assuming parse error and ignoring"); | ||
1688 | Ok(None) | ||
1689 | } | ||
1690 | _ => { | ||
1691 | // Something else happened - e.g. `rustfmt` is missing or caught a signal | ||
1692 | Err(LspError::new( | ||
1693 | -32900, | ||
1694 | format!( | ||
1695 | r#"rustfmt exited with: | ||
1696 | Status: {} | ||
1697 | stdout: {} | ||
1698 | stderr: {}"#, | ||
1699 | output.status, captured_stdout, captured_stderr, | ||
1700 | ), | ||
1701 | ) | ||
1702 | .into()) | ||
1703 | } | ||
1704 | }; | ||
1705 | } | ||
1706 | |||
1707 | let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout); | ||
1708 | |||
1709 | if line_index.endings != new_line_endings { | ||
1710 | // If line endings are different, send the entire file. | ||
1711 | // Diffing would not work here, as the line endings might be the only | ||
1712 | // difference. | ||
1713 | Ok(Some(to_proto::text_edit_vec( | ||
1714 | &line_index, | ||
1715 | TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text), | ||
1716 | ))) | ||
1717 | } else if *file == new_text { | ||
1718 | // The document is already formatted correctly -- no edits needed. | ||
1719 | Ok(None) | ||
1720 | } else { | ||
1721 | Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text)))) | ||
1722 | } | ||
1723 | } | ||
1724 | |||
1678 | #[derive(Debug, Serialize, Deserialize)] | 1725 | #[derive(Debug, Serialize, Deserialize)] |
1679 | struct CompletionResolveData { | 1726 | struct CompletionResolveData { |
1680 | position: lsp_types::TextDocumentPositionParams, | 1727 | position: lsp_types::TextDocumentPositionParams, |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index cb002f700..008758ea0 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -543,6 +543,7 @@ impl GlobalState { | |||
543 | .on::<lsp_types::request::Rename>(handlers::handle_rename) | 543 | .on::<lsp_types::request::Rename>(handlers::handle_rename) |
544 | .on::<lsp_types::request::References>(handlers::handle_references) | 544 | .on::<lsp_types::request::References>(handlers::handle_references) |
545 | .on::<lsp_types::request::Formatting>(handlers::handle_formatting) | 545 | .on::<lsp_types::request::Formatting>(handlers::handle_formatting) |
546 | .on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting) | ||
546 | .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight) | 547 | .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight) |
547 | .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare) | 548 | .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare) |
548 | .on::<lsp_types::request::CallHierarchyIncomingCalls>( | 549 | .on::<lsp_types::request::CallHierarchyIncomingCalls>( |
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 7a53e4a8b..93b5ff55f 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs | |||
@@ -455,7 +455,11 @@ impl ProjectFolders { | |||
455 | dirs.include.extend(root.include); | 455 | dirs.include.extend(root.include); |
456 | dirs.exclude.extend(root.exclude); | 456 | dirs.exclude.extend(root.exclude); |
457 | for excl in global_excludes { | 457 | for excl in global_excludes { |
458 | if dirs.include.iter().any(|incl| incl.starts_with(excl)) { | 458 | if dirs |
459 | .include | ||
460 | .iter() | ||
461 | .any(|incl| incl.starts_with(excl) || excl.starts_with(incl)) | ||
462 | { | ||
459 | dirs.exclude.push(excl.clone()); | 463 | dirs.exclude.push(excl.clone()); |
460 | } | 464 | } |
461 | } | 465 | } |
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index 4fd576adb..db216d951 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs | |||
@@ -46,7 +46,7 @@ define_semantic_token_types![ | |||
46 | (BRACE, "brace"), | 46 | (BRACE, "brace"), |
47 | (BRACKET, "bracket"), | 47 | (BRACKET, "bracket"), |
48 | (BUILTIN_TYPE, "builtinType"), | 48 | (BUILTIN_TYPE, "builtinType"), |
49 | (CHAR_LITERAL, "characterLiteral"), | 49 | (CHAR, "character"), |
50 | (COLON, "colon"), | 50 | (COLON, "colon"), |
51 | (COMMA, "comma"), | 51 | (COMMA, "comma"), |
52 | (COMPARISON, "comparison"), | 52 | (COMPARISON, "comparison"), |
@@ -92,6 +92,7 @@ define_semantic_token_modifiers![ | |||
92 | (MUTABLE, "mutable"), | 92 | (MUTABLE, "mutable"), |
93 | (CONSUMING, "consuming"), | 93 | (CONSUMING, "consuming"), |
94 | (ASYNC, "async"), | 94 | (ASYNC, "async"), |
95 | (LIBRARY, "library"), | ||
95 | (UNSAFE, "unsafe"), | 96 | (UNSAFE, "unsafe"), |
96 | (ATTRIBUTE_MODIFIER, "attribute"), | 97 | (ATTRIBUTE_MODIFIER, "attribute"), |
97 | (TRAIT_MODIFIER, "trait"), | 98 | (TRAIT_MODIFIER, "trait"), |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 410384ae5..f5c8535a2 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -466,7 +466,7 @@ fn semantic_token_type_and_modifiers( | |||
466 | HlTag::BoolLiteral => semantic_tokens::BOOLEAN, | 466 | HlTag::BoolLiteral => semantic_tokens::BOOLEAN, |
467 | HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE, | 467 | HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE, |
468 | HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER, | 468 | HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER, |
469 | HlTag::CharLiteral => semantic_tokens::CHAR_LITERAL, | 469 | HlTag::CharLiteral => semantic_tokens::CHAR, |
470 | HlTag::Comment => lsp_types::SemanticTokenType::COMMENT, | 470 | HlTag::Comment => lsp_types::SemanticTokenType::COMMENT, |
471 | HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE, | 471 | HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE, |
472 | HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER, | 472 | HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER, |
@@ -504,6 +504,7 @@ fn semantic_token_type_and_modifiers( | |||
504 | HlMod::Mutable => semantic_tokens::MUTABLE, | 504 | HlMod::Mutable => semantic_tokens::MUTABLE, |
505 | HlMod::Consuming => semantic_tokens::CONSUMING, | 505 | HlMod::Consuming => semantic_tokens::CONSUMING, |
506 | HlMod::Async => semantic_tokens::ASYNC, | 506 | HlMod::Async => semantic_tokens::ASYNC, |
507 | HlMod::Library => semantic_tokens::LIBRARY, | ||
507 | HlMod::Unsafe => semantic_tokens::UNSAFE, | 508 | HlMod::Unsafe => semantic_tokens::UNSAFE, |
508 | HlMod::Callable => semantic_tokens::CALLABLE, | 509 | HlMod::Callable => semantic_tokens::CALLABLE, |
509 | HlMod::Static => lsp_types::SemanticTokenModifier::STATIC, | 510 | HlMod::Static => lsp_types::SemanticTokenModifier::STATIC, |
@@ -533,6 +534,7 @@ pub(crate) fn folding_range( | |||
533 | | FoldKind::Consts | 534 | | FoldKind::Consts |
534 | | FoldKind::Statics | 535 | | FoldKind::Statics |
535 | | FoldKind::WhereClause | 536 | | FoldKind::WhereClause |
537 | | FoldKind::ReturnType | ||
536 | | FoldKind::Array => None, | 538 | | FoldKind::Array => None, |
537 | }; | 539 | }; |
538 | 540 | ||
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index ca8103668..f7ee29d14 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs | |||
@@ -356,13 +356,17 @@ impl ast::MatchArm { | |||
356 | impl ast::MatchArmList { | 356 | impl ast::MatchArmList { |
357 | pub fn add_arm(&self, arm: ast::MatchArm) { | 357 | pub fn add_arm(&self, arm: ast::MatchArm) { |
358 | normalize_ws_between_braces(self.syntax()); | 358 | normalize_ws_between_braces(self.syntax()); |
359 | let mut elements = Vec::new(); | ||
359 | let position = match self.arms().last() { | 360 | let position = match self.arms().last() { |
360 | Some(last_arm) => { | 361 | Some(last_arm) => { |
361 | let curly = last_arm | 362 | let comma = last_arm |
362 | .syntax() | 363 | .syntax() |
363 | .siblings_with_tokens(Direction::Next) | 364 | .siblings_with_tokens(Direction::Next) |
364 | .find(|it| it.kind() == T![,]); | 365 | .find(|it| it.kind() == T![,]); |
365 | Position::after(curly.unwrap_or_else(|| last_arm.syntax().clone().into())) | 366 | if needs_comma(&last_arm) && comma.is_none() { |
367 | elements.push(make::token(SyntaxKind::COMMA).into()); | ||
368 | } | ||
369 | Position::after(comma.unwrap_or_else(|| last_arm.syntax().clone().into())) | ||
366 | } | 370 | } |
367 | None => match self.l_curly_token() { | 371 | None => match self.l_curly_token() { |
368 | Some(it) => Position::after(it), | 372 | Some(it) => Position::after(it), |
@@ -370,11 +374,16 @@ impl ast::MatchArmList { | |||
370 | }, | 374 | }, |
371 | }; | 375 | }; |
372 | let indent = IndentLevel::from_node(self.syntax()) + 1; | 376 | let indent = IndentLevel::from_node(self.syntax()) + 1; |
373 | let elements = vec![ | 377 | elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into()); |
374 | make::tokens::whitespace(&format!("\n{}", indent)).into(), | 378 | elements.push(arm.syntax().clone().into()); |
375 | arm.syntax().clone().into(), | 379 | if needs_comma(&arm) { |
376 | ]; | 380 | elements.push(make::token(SyntaxKind::COMMA).into()); |
381 | } | ||
377 | ted::insert_all(position, elements); | 382 | ted::insert_all(position, elements); |
383 | |||
384 | fn needs_comma(arm: &ast::MatchArm) -> bool { | ||
385 | arm.expr().map_or(false, |e| !e.is_block_like()) | ||
386 | } | ||
378 | } | 387 | } |
379 | } | 388 | } |
380 | 389 | ||
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index df8f98b5b..884fe0739 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs | |||
@@ -243,6 +243,13 @@ impl ast::Path { | |||
243 | } | 243 | } |
244 | } | 244 | } |
245 | 245 | ||
246 | pub fn as_single_name_ref(&self) -> Option<ast::NameRef> { | ||
247 | match self.qualifier() { | ||
248 | Some(_) => None, | ||
249 | None => self.segment()?.name_ref(), | ||
250 | } | ||
251 | } | ||
252 | |||
246 | pub fn first_qualifier_or_self(&self) -> ast::Path { | 253 | pub fn first_qualifier_or_self(&self) -> ast::Path { |
247 | successors(Some(self.clone()), ast::Path::qualifier).last().unwrap() | 254 | successors(Some(self.clone()), ast::Path::qualifier).last().unwrap() |
248 | } | 255 | } |
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index fce4fd6bf..bd017567c 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs | |||
@@ -96,6 +96,21 @@ pub enum RangeOrOffset { | |||
96 | Offset(TextSize), | 96 | Offset(TextSize), |
97 | } | 97 | } |
98 | 98 | ||
99 | impl RangeOrOffset { | ||
100 | pub fn expect_offset(self) -> TextSize { | ||
101 | match self { | ||
102 | RangeOrOffset::Offset(it) => it, | ||
103 | RangeOrOffset::Range(_) => panic!("expected an offset but got a range instead"), | ||
104 | } | ||
105 | } | ||
106 | pub fn expect_range(self) -> TextRange { | ||
107 | match self { | ||
108 | RangeOrOffset::Range(it) => it, | ||
109 | RangeOrOffset::Offset(_) => panic!("expected a range but got an offset"), | ||
110 | } | ||
111 | } | ||
112 | } | ||
113 | |||
99 | impl From<RangeOrOffset> for TextRange { | 114 | impl From<RangeOrOffset> for TextRange { |
100 | fn from(selection: RangeOrOffset) -> Self { | 115 | fn from(selection: RangeOrOffset) -> Self { |
101 | match selection { | 116 | match selection { |
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index fbe2ce1c9..11a3dd04e 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md | |||
@@ -25,15 +25,21 @@ rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinat | |||
25 | 25 | ||
26 | https://clangd.llvm.org/extensions.html#utf-8-offsets | 26 | https://clangd.llvm.org/extensions.html#utf-8-offsets |
27 | 27 | ||
28 | ## `initializationOptions` | 28 | ## Configuration in `initializationOptions` |
29 | |||
30 | **Issue:** https://github.com/microsoft/language-server-protocol/issues/567 | ||
31 | |||
32 | The `initializationOptions` field of the `InitializeParams` of the initialization request should contain `"rust-analyzer"` section of the configuration. | ||
33 | |||
34 | `rust-analyzer` normally sends a `"workspace/configuration"` request with `{ "items": ["rust-analyzer"] }` payload. | ||
35 | However, the server can't do this during initialization. | ||
36 | At the same time some essential configuration parameters are needed early on, before servicing requests. | ||
37 | For this reason, we ask that `initializationOptions` contains the configuration, as if the server did make a `"workspace/configuration"` request. | ||
29 | 38 | ||
30 | For `initializationOptions`, `rust-analyzer` expects `"rust-analyzer"` section of the configuration. | ||
31 | That is, `rust-analyzer` usually sends `"workspace/configuration"` request with `{ "items": ["rust-analyzer"] }` payload. | ||
32 | `initializationOptions` should contain the same data that would be in the first item of the result. | ||
33 | If a language client does not know about `rust-analyzer`'s configuration options it can get sensible defaults by doing any of the following: | 39 | If a language client does not know about `rust-analyzer`'s configuration options it can get sensible defaults by doing any of the following: |
34 | * Not sending `initializationOptions` | 40 | * Not sending `initializationOptions` |
35 | * Send `"initializationOptions": null` | 41 | * Sending `"initializationOptions": null` |
36 | * Send `"initializationOptions": {}` | 42 | * Sending `"initializationOptions": {}` |
37 | 43 | ||
38 | ## Snippet `TextEdit` | 44 | ## Snippet `TextEdit` |
39 | 45 | ||
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index c02bab7cc..4a5782a57 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc | |||
@@ -161,18 +161,18 @@ This should be the reverse mapping of what is passed to `rustc` as `--remap-path | |||
161 | [[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: | 161 | [[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: |
162 | + | 162 | + |
163 | -- | 163 | -- |
164 | List of warnings that should be displayed with info severity. | 164 | List of warnings that should be displayed with hint severity. |
165 | 165 | ||
166 | The warnings will be indicated by a blue squiggly underline in code | 166 | The warnings will be indicated by faded text or three dots in code |
167 | and a blue icon in the `Problems Panel`. | 167 | and will not show up in the `Problems Panel`. |
168 | -- | 168 | -- |
169 | [[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`):: | 169 | [[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`):: |
170 | + | 170 | + |
171 | -- | 171 | -- |
172 | List of warnings that should be displayed with hint severity. | 172 | List of warnings that should be displayed with info severity. |
173 | 173 | ||
174 | The warnings will be indicated by faded text or three dots in code | 174 | The warnings will be indicated by a blue squiggly underline in code |
175 | and will not show up in the `Problems Panel`. | 175 | and a blue icon in the `Problems Panel`. |
176 | -- | 176 | -- |
177 | [[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`):: | 177 | [[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`):: |
178 | + | 178 | + |
@@ -346,6 +346,13 @@ Additional arguments to `rustfmt`. | |||
346 | Advanced option, fully override the command rust-analyzer uses for | 346 | Advanced option, fully override the command rust-analyzer uses for |
347 | formatting. | 347 | formatting. |
348 | -- | 348 | -- |
349 | [[rust-analyzer.rustfmt.enableRangeFormatting]]rust-analyzer.rustfmt.enableRangeFormatting (default: `false`):: | ||
350 | + | ||
351 | -- | ||
352 | Enables the use of rustfmt's unstable range formatting command for the | ||
353 | `textDocument/rangeFormatting` request. The rustfmt option is unstable and only | ||
354 | available on a nightly build. | ||
355 | -- | ||
349 | [[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`):: | 356 | [[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`):: |
350 | + | 357 | + |
351 | -- | 358 | -- |
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index ad367511b..1f95df56e 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc | |||
@@ -139,18 +139,18 @@ $ cargo xtask install --server | |||
139 | === rust-analyzer Language Server Binary | 139 | === rust-analyzer Language Server Binary |
140 | 140 | ||
141 | Other editors generally require the `rust-analyzer` binary to be in `$PATH`. | 141 | Other editors generally require the `rust-analyzer` binary to be in `$PATH`. |
142 | You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. | 142 | You can download pre-built binaries from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. |
143 | Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analyzer` and make it executable in addition to moving it into a directory in your `$PATH`. | 143 | You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`. |
144 | 144 | ||
145 | On Linux to install the `rust-analyzer` binary into `~/.local/bin`, this commands could be used | 145 | On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work: |
146 | 146 | ||
147 | [source,bash] | 147 | [source,bash] |
148 | ---- | 148 | ---- |
149 | $ curl -L https://github.com/rust-analyzer/rust-analyzer/releases/latest/download/rust-analyzer-linux -o ~/.local/bin/rust-analyzer | 149 | $ curl -L https://github.com/rust-analyzer/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer |
150 | $ chmod +x ~/.local/bin/rust-analyzer | 150 | $ chmod +x ~/.local/bin/rust-analyzer |
151 | ---- | 151 | ---- |
152 | 152 | ||
153 | Ensure `~/.local/bin` is listed in the `$PATH` variable. | 153 | Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on a `x86-64` system. |
154 | 154 | ||
155 | Alternatively, you can install it from source using the command below. | 155 | Alternatively, you can install it from source using the command below. |
156 | You'll need the latest stable version of the Rust toolchain. | 156 | You'll need the latest stable version of the Rust toolchain. |
diff --git a/editors/code/package.json b/editors/code/package.json index 17d9281ff..5b80cc1f9 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -597,7 +597,7 @@ | |||
597 | "type": "object" | 597 | "type": "object" |
598 | }, | 598 | }, |
599 | "rust-analyzer.diagnostics.warningsAsHint": { | 599 | "rust-analyzer.diagnostics.warningsAsHint": { |
600 | "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.", | 600 | "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.", |
601 | "default": [], | 601 | "default": [], |
602 | "type": "array", | 602 | "type": "array", |
603 | "items": { | 603 | "items": { |
@@ -605,7 +605,7 @@ | |||
605 | } | 605 | } |
606 | }, | 606 | }, |
607 | "rust-analyzer.diagnostics.warningsAsInfo": { | 607 | "rust-analyzer.diagnostics.warningsAsInfo": { |
608 | "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.", | 608 | "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.", |
609 | "default": [], | 609 | "default": [], |
610 | "type": "array", | 610 | "type": "array", |
611 | "items": { | 611 | "items": { |
@@ -795,6 +795,11 @@ | |||
795 | "type": "string" | 795 | "type": "string" |
796 | } | 796 | } |
797 | }, | 797 | }, |
798 | "rust-analyzer.rustfmt.enableRangeFormatting": { | ||
799 | "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.", | ||
800 | "default": false, | ||
801 | "type": "boolean" | ||
802 | }, | ||
798 | "rust-analyzer.workspace.symbol.search.scope": { | 803 | "rust-analyzer.workspace.symbol.search.scope": { |
799 | "markdownDescription": "Workspace symbol search scope.", | 804 | "markdownDescription": "Workspace symbol search scope.", |
800 | "default": "workspace", | 805 | "default": "workspace", |
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 69dbe2535..f13ae07e1 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts | |||
@@ -32,14 +32,9 @@ export function createClient(serverPath: string, workspace: Workspace, extraEnv: | |||
32 | const newEnv = Object.assign({}, process.env); | 32 | const newEnv = Object.assign({}, process.env); |
33 | Object.assign(newEnv, extraEnv); | 33 | Object.assign(newEnv, extraEnv); |
34 | 34 | ||
35 | let cwd = undefined; | ||
36 | if (workspace.kind === "Workspace Folder") { | ||
37 | cwd = workspace.folder.fsPath; | ||
38 | }; | ||
39 | |||
40 | const run: lc.Executable = { | 35 | const run: lc.Executable = { |
41 | command: serverPath, | 36 | command: serverPath, |
42 | options: { cwd, env: newEnv }, | 37 | options: { env: newEnv }, |
43 | }; | 38 | }; |
44 | const serverOptions: lc.ServerOptions = { | 39 | const serverOptions: lc.ServerOptions = { |
45 | run, | 40 | run, |
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 22c5f62a1..cf67dd8cf 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts | |||
@@ -10,7 +10,6 @@ import { ServerStatusParams } from './lsp_ext'; | |||
10 | export type Workspace = | 10 | export type Workspace = |
11 | { | 11 | { |
12 | kind: 'Workspace Folder'; | 12 | kind: 'Workspace Folder'; |
13 | folder: vscode.Uri; | ||
14 | } | 13 | } |
15 | | { | 14 | | { |
16 | kind: 'Detached Files'; | 15 | kind: 'Detached Files'; |
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index b735186fe..d26273246 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts | |||
@@ -45,8 +45,7 @@ async function tryActivate(context: vscode.ExtensionContext) { | |||
45 | throw new Error(message); | 45 | throw new Error(message); |
46 | }); | 46 | }); |
47 | 47 | ||
48 | const workspaceFolder = vscode.workspace.workspaceFolders?.[0]; | 48 | if (vscode.workspace.workspaceFolders?.length === 0) { |
49 | if (workspaceFolder === undefined) { | ||
50 | const rustDocuments = vscode.workspace.textDocuments.filter(document => isRustDocument(document)); | 49 | const rustDocuments = vscode.workspace.textDocuments.filter(document => isRustDocument(document)); |
51 | if (rustDocuments.length > 0) { | 50 | if (rustDocuments.length > 0) { |
52 | ctx = await Ctx.create(config, context, serverPath, { kind: 'Detached Files', files: rustDocuments }); | 51 | ctx = await Ctx.create(config, context, serverPath, { kind: 'Detached Files', files: rustDocuments }); |
@@ -58,8 +57,8 @@ async function tryActivate(context: vscode.ExtensionContext) { | |||
58 | // registers its `onDidChangeDocument` handler before us. | 57 | // registers its `onDidChangeDocument` handler before us. |
59 | // | 58 | // |
60 | // This is a horribly, horribly wrong way to deal with this problem. | 59 | // This is a horribly, horribly wrong way to deal with this problem. |
61 | ctx = await Ctx.create(config, context, serverPath, { kind: "Workspace Folder", folder: workspaceFolder.uri }); | 60 | ctx = await Ctx.create(config, context, serverPath, { kind: "Workspace Folder" }); |
62 | ctx.pushCleanup(activateTaskProvider(workspaceFolder, ctx.config)); | 61 | ctx.pushCleanup(activateTaskProvider(ctx.config)); |
63 | } | 62 | } |
64 | await initCommonContext(context, ctx); | 63 | await initCommonContext(context, ctx); |
65 | 64 | ||
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index 138e3f686..d0be84068 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts | |||
@@ -142,7 +142,11 @@ export async function createTask(runnable: ra.Runnable, config: Config): Promise | |||
142 | // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion | 142 | // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion |
143 | const target = vscode.workspace.workspaceFolders![0]; // safe, see main activate() | 143 | const target = vscode.workspace.workspaceFolders![0]; // safe, see main activate() |
144 | const cargoTask = await tasks.buildCargoTask(target, definition, runnable.label, args, config.cargoRunner, true); | 144 | const cargoTask = await tasks.buildCargoTask(target, definition, runnable.label, args, config.cargoRunner, true); |
145 | |||
145 | cargoTask.presentationOptions.clear = true; | 146 | cargoTask.presentationOptions.clear = true; |
147 | // Sadly, this doesn't prevent focus stealing if the terminal is currently | ||
148 | // hidden, and will become revealed due to task execution. | ||
149 | cargoTask.presentationOptions.focus = false; | ||
146 | 150 | ||
147 | return cargoTask; | 151 | return cargoTask; |
148 | } | 152 | } |
diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts index 58f7aa128..a409e5296 100644 --- a/editors/code/src/snippets.ts +++ b/editors/code/src/snippets.ts | |||
@@ -52,7 +52,7 @@ export async function applySnippetTextEdits(editor: vscode.TextEditor, edits: vs | |||
52 | } else { | 52 | } else { |
53 | builder.replace(indel.range, indel.newText); | 53 | builder.replace(indel.range, indel.newText); |
54 | } | 54 | } |
55 | lineDelta = countLines(indel.newText) - (indel.range.end.line - indel.range.start.line); | 55 | lineDelta += countLines(indel.newText) - (indel.range.end.line - indel.range.start.line); |
56 | } | 56 | } |
57 | }); | 57 | }); |
58 | if (selections.length > 0) editor.selections = selections; | 58 | if (selections.length > 0) editor.selections = selections; |
diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts index a3ff15102..694ee1e41 100644 --- a/editors/code/src/tasks.ts +++ b/editors/code/src/tasks.ts | |||
@@ -17,11 +17,9 @@ export interface CargoTaskDefinition extends vscode.TaskDefinition { | |||
17 | } | 17 | } |
18 | 18 | ||
19 | class CargoTaskProvider implements vscode.TaskProvider { | 19 | class CargoTaskProvider implements vscode.TaskProvider { |
20 | private readonly target: vscode.WorkspaceFolder; | ||
21 | private readonly config: Config; | 20 | private readonly config: Config; |
22 | 21 | ||
23 | constructor(target: vscode.WorkspaceFolder, config: Config) { | 22 | constructor(config: Config) { |
24 | this.target = target; | ||
25 | this.config = config; | 23 | this.config = config; |
26 | } | 24 | } |
27 | 25 | ||
@@ -40,10 +38,12 @@ class CargoTaskProvider implements vscode.TaskProvider { | |||
40 | ]; | 38 | ]; |
41 | 39 | ||
42 | const tasks: vscode.Task[] = []; | 40 | const tasks: vscode.Task[] = []; |
43 | for (const def of defs) { | 41 | for (const workspaceTarget of vscode.workspace.workspaceFolders || []) { |
44 | const vscodeTask = await buildCargoTask(this.target, { type: TASK_TYPE, command: def.command }, `cargo ${def.command}`, [def.command], this.config.cargoRunner); | 42 | for (const def of defs) { |
45 | vscodeTask.group = def.group; | 43 | const vscodeTask = await buildCargoTask(workspaceTarget, { type: TASK_TYPE, command: def.command }, `cargo ${def.command}`, [def.command], this.config.cargoRunner); |
46 | tasks.push(vscodeTask); | 44 | vscodeTask.group = def.group; |
45 | tasks.push(vscodeTask); | ||
46 | } | ||
47 | } | 47 | } |
48 | 48 | ||
49 | return tasks; | 49 | return tasks; |
@@ -58,14 +58,19 @@ class CargoTaskProvider implements vscode.TaskProvider { | |||
58 | 58 | ||
59 | if (definition.type === TASK_TYPE && definition.command) { | 59 | if (definition.type === TASK_TYPE && definition.command) { |
60 | const args = [definition.command].concat(definition.args ?? []); | 60 | const args = [definition.command].concat(definition.args ?? []); |
61 | 61 | if (isWorkspaceFolder(task.scope)) { | |
62 | return await buildCargoTask(this.target, definition, task.name, args, this.config.cargoRunner); | 62 | return await buildCargoTask(task.scope, definition, task.name, args, this.config.cargoRunner); |
63 | } | ||
63 | } | 64 | } |
64 | 65 | ||
65 | return undefined; | 66 | return undefined; |
66 | } | 67 | } |
67 | } | 68 | } |
68 | 69 | ||
70 | function isWorkspaceFolder(scope?: any): scope is vscode.WorkspaceFolder { | ||
71 | return (scope as vscode.WorkspaceFolder).name !== undefined; | ||
72 | } | ||
73 | |||
69 | export async function buildCargoTask( | 74 | export async function buildCargoTask( |
70 | target: vscode.WorkspaceFolder, | 75 | target: vscode.WorkspaceFolder, |
71 | definition: CargoTaskDefinition, | 76 | definition: CargoTaskDefinition, |
@@ -119,7 +124,7 @@ export async function buildCargoTask( | |||
119 | ); | 124 | ); |
120 | } | 125 | } |
121 | 126 | ||
122 | export function activateTaskProvider(target: vscode.WorkspaceFolder, config: Config): vscode.Disposable { | 127 | export function activateTaskProvider(config: Config): vscode.Disposable { |
123 | const provider = new CargoTaskProvider(target, config); | 128 | const provider = new CargoTaskProvider(config); |
124 | return vscode.tasks.registerTaskProvider(TASK_TYPE, provider); | 129 | return vscode.tasks.registerTaskProvider(TASK_TYPE, provider); |
125 | } | 130 | } |
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index 7ac9ae5b8..3a67294c5 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs | |||
@@ -67,7 +67,11 @@ fn dist_client(version: &str, release_tag: &str) -> Result<()> { | |||
67 | fn dist_server(release_channel: &str) -> Result<()> { | 67 | fn dist_server(release_channel: &str) -> Result<()> { |
68 | let _e = pushenv("RUST_ANALYZER_CHANNEL", release_channel); | 68 | let _e = pushenv("RUST_ANALYZER_CHANNEL", release_channel); |
69 | let _e = pushenv("CARGO_PROFILE_RELEASE_LTO", "thin"); | 69 | let _e = pushenv("CARGO_PROFILE_RELEASE_LTO", "thin"); |
70 | let _e = pushenv("CARGO_PROFILE_RELEASE_DEBUG", "1"); | 70 | |
71 | // Uncomment to enable debug info for releases. Note that: | ||
72 | // * debug info is split on windows and macs, so it does nothing for those platforms, | ||
73 | // * on Linux, this blows up the binary size from 8MB to 43MB, which is unreasonable. | ||
74 | // let _e = pushenv("CARGO_PROFILE_RELEASE_DEBUG", "1"); | ||
71 | 75 | ||
72 | let target = get_target(); | 76 | let target = get_target(); |
73 | if target.contains("-linux-gnu") || target.contains("-linux-musl") { | 77 | if target.contains("-linux-gnu") || target.contains("-linux-musl") { |
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs index 82b33a7a0..6f687a788 100644 --- a/xtask/src/tidy.rs +++ b/xtask/src/tidy.rs | |||
@@ -275,6 +275,7 @@ fn check_todo(path: &Path, text: &str) { | |||
275 | // Some of our assists generate `todo!()`. | 275 | // Some of our assists generate `todo!()`. |
276 | "handlers/add_turbo_fish.rs", | 276 | "handlers/add_turbo_fish.rs", |
277 | "handlers/generate_function.rs", | 277 | "handlers/generate_function.rs", |
278 | "handlers/fill_match_arms.rs", | ||
278 | // To support generating `todo!()` in assists, we have `expr_todo()` in | 279 | // To support generating `todo!()` in assists, we have `expr_todo()` in |
279 | // `ast::make`. | 280 | // `ast::make`. |
280 | "ast/make.rs", | 281 | "ast/make.rs", |