59 files changed, 1391 insertions, 1153 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 1101770e7..c6994127b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1550,6 +1550,7 @@ version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1500e84d27fe482ed1dc791a56eddc2f230046a040fa908c08bda1d9fb615779"
 dependencies = [
+ "indexmap",
  "itoa",
  "ryu",
  "serde",
diff --git a/crates/assists/src/assist_config.rs b/crates/assists/src/assist_config.rs
index 786224cfa..c458d9054 100644
--- a/crates/assists/src/assist_config.rs
+++ b/crates/assists/src/assist_config.rs
@@ -5,7 +5,7 @@
 //! assists if we are allowed to.
 
 use hir::PrefixKind;
-use ide_db::helpers::insert_use::MergeBehaviour;
+use ide_db::helpers::insert_use::MergeBehavior;
 
 use crate::AssistKind;
 
@@ -39,12 +39,12 @@ impl Default for AssistConfig
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub struct InsertUseConfig {
-    pub merge: Option<MergeBehaviour>,
+    pub merge: Option<MergeBehavior>,
     pub prefix_kind: PrefixKind,
 }
 
 impl Default for InsertUseConfig {
     fn default() -> Self {
-        InsertUseConfig { merge: Some(MergeBehaviour::Full), prefix_kind: PrefixKind::Plain }
+        InsertUseConfig { merge: Some(MergeBehavior::Full), prefix_kind: PrefixKind::Plain }
     }
 }
diff --git a/crates/assists/src/handlers/merge_imports.rs b/crates/assists/src/handlers/merge_imports.rs
index b7e853994..2f0dc7831 100644
--- a/crates/assists/src/handlers/merge_imports.rs
+++ b/crates/assists/src/handlers/merge_imports.rs
@@ -1,4 +1,4 @@
-use ide_db::helpers::insert_use::{try_merge_imports, try_merge_trees, MergeBehaviour};
+use ide_db::helpers::insert_use::{try_merge_imports, try_merge_trees, MergeBehavior};
 use syntax::{
     algo::{neighbor, SyntaxRewriter},
     ast, AstNode,
@@ -30,7 +30,7 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
     if let Some(use_item) = tree.syntax().parent().and_then(ast::Use::cast) {
         let (merged, to_delete) =
             next_prev().filter_map(|dir| neighbor(&use_item, dir)).find_map(|use_item2| {
-                try_merge_imports(&use_item, &use_item2, MergeBehaviour::Full).zip(Some(use_item2))
+                try_merge_imports(&use_item, &use_item2, MergeBehavior::Full).zip(Some(use_item2))
             })?;
 
         rewriter.replace_ast(&use_item, &merged);
@@ -42,7 +42,7 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
     } else {
         let (merged, to_delete) =
             next_prev().filter_map(|dir| neighbor(&tree, dir)).find_map(|use_tree| {
-                try_merge_trees(&tree, &use_tree, MergeBehaviour::Full).zip(Some(use_tree))
+                try_merge_trees(&tree, &use_tree, MergeBehavior::Full).zip(Some(use_tree))
             })?;
 
         rewriter.replace_ast(&tree, &merged);
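Note: the `MergeBehaviour` → `MergeBehavior` rename in the two hunks above is purely mechanical, but the value being passed (`MergeBehavior::Full`) is what drives use-tree merging: `try_merge_imports`/`try_merge_trees` fold two neighbouring `use` items into one nested tree when their paths share a prefix. A rough sketch of that idea on plain path strings (illustrative only; the real assist works on `ast::Use`/`ast::UseTree` nodes, and `merge_use_paths` below is a made-up helper):

    /// Illustrative only: merge `use a::b;` and `use a::c;` into `use a::{b, c};`
    /// when the first path segment matches. The real assist operates on syntax
    /// trees and respects the configured MergeBehavior.
    fn merge_use_paths(left: &str, right: &str) -> Option<String> {
        let (l_head, l_tail) = left.split_once("::")?;
        let (r_head, r_tail) = right.split_once("::")?;
        if l_head != r_head {
            return None; // nothing shared: keep two separate `use` items
        }
        Some(format!("{}::{{{}, {}}}", l_head, l_tail, r_tail))
    }

    fn main() {
        assert_eq!(merge_use_paths("a::b", "a::c").as_deref(), Some("a::{b, c}"));
        assert_eq!(merge_use_paths("a::b", "x::y"), None);
        println!("{:?}", merge_use_paths("std::fmt", "std::io"));
    }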
diff --git a/crates/base_db/src/lib.rs b/crates/base_db/src/lib.rs
index 5571af495..595f28ada 100644
--- a/crates/base_db/src/lib.rs
+++ b/crates/base_db/src/lib.rs
@@ -18,7 +18,7 @@ pub use crate::{
     },
 };
 pub use salsa;
-pub use vfs::{file_set::FileSet, FileId, VfsPath};
+pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
 
 #[macro_export]
 macro_rules! impl_intern_key {
@@ -91,12 +91,7 @@ pub const DEFAULT_LRU_CAP: usize = 128;
 pub trait FileLoader {
     /// Text of the file.
     fn file_text(&self, file_id: FileId) -> Arc<String>;
-    /// Note that we intentionally accept a `&str` and not a `&Path` here. This
-    /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such,
-    /// so the input is guaranteed to be utf-8 string. One might be tempted to
-    /// introduce some kind of "utf-8 path with / separators", but that's a bad idea. Behold
-    /// `#[path = "C://no/way"]`
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId>;
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
 }
 
@@ -155,11 +150,11 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         SourceDatabaseExt::file_text(self.0, file_id)
     }
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
         // FIXME: this *somehow* should be platform agnostic...
-        let source_root = self.0.file_source_root(anchor);
+        let source_root = self.0.file_source_root(path.anchor);
         let source_root = self.0.source_root(source_root);
-        source_root.file_set.resolve_path(anchor, path)
+        source_root.file_set.resolve_path(path)
     }
 
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
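Note: `resolve_path` now takes a single `AnchoredPath` instead of a separate anchor `FileId` and `&str`. The struct itself is a plain (anchor, relative path) pair, as the later `mod_resolution.rs` hunk shows (`AnchoredPath { anchor: file_id, path: candidate.as_str() }`). A minimal self-contained sketch of how such a pair can be resolved against a toy file set (the `FileSet` below is a stand-in, not the real `vfs::file_set::FileSet` API):

    use std::collections::HashMap;
    use std::path::PathBuf;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct FileId(u32);

    /// A path that only makes sense relative to a known file, e.g. the "foo.rs"
    /// produced when resolving `mod foo;` declared inside lib.rs.
    #[derive(Clone, Copy, Debug)]
    struct AnchoredPath<'a> {
        anchor: FileId,
        path: &'a str,
    }

    /// Toy stand-in for a file set: maps ids to paths and back.
    struct FileSet {
        paths: HashMap<FileId, PathBuf>,
        ids: HashMap<PathBuf, FileId>,
    }

    impl FileSet {
        fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
            // Resolve relative to the directory containing the anchor file.
            let dir = self.paths.get(&path.anchor)?.parent()?;
            self.ids.get(&dir.join(path.path)).copied()
        }
    }

    fn main() {
        let (lib, foo) = (FileId(0), FileId(1));
        let paths: HashMap<_, _> = vec![
            (lib, PathBuf::from("/crate/src/lib.rs")),
            (foo, PathBuf::from("/crate/src/foo.rs")),
        ]
        .into_iter()
        .collect();
        let ids = paths.iter().map(|(id, p)| (p.clone(), *id)).collect();
        let file_set = FileSet { paths, ids };

        let anchored = AnchoredPath { anchor: lib, path: "foo.rs" };
        assert_eq!(file_set.resolve_path(anchored), Some(foo));
    }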
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index 4e4e2b36f..2b0924ae5 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -45,7 +45,7 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
     });
 
     if ctx.config.enable_autoimport_completions && ctx.config.resolve_additional_edits_lazily() {
-        fuzzy_completion(acc, ctx).unwrap_or_default()
+        fuzzy_completion(acc, ctx);
     }
 }
 
@@ -100,10 +100,10 @@ fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &T
 // To avoid an excessive amount of the results returned, completion input is checked for inclusion in the identifiers only
 // (i.e. in `HashMap` in the `std::collections::HashMap` path), also not in the module indentifiers.
 //
-// .Merge Behaviour
+// .Merge Behavior
 //
-// It is possible to configure how use-trees are merged with the `importMergeBehaviour` setting.
-// Mimics the corresponding behaviour of the `Auto Import` feature.
+// It is possible to configure how use-trees are merged with the `importMergeBehavior` setting.
+// Mimics the corresponding behavior of the `Auto Import` feature.
 //
 // .LSP and performance implications
 //
@@ -147,11 +147,7 @@ fn fuzzy_completion(acc: &mut Completions, ctx: &CompletionContext) -> Option<()
         .filter_map(|(import_path, definition)| {
             render_resolution_with_import(
                 RenderContext::new(ctx),
-                ImportEdit {
-                    import_path: import_path.clone(),
-                    import_scope: import_scope.clone(),
-                    merge_behaviour: ctx.config.merge,
-                },
+                ImportEdit { import_path: import_path.clone(), import_scope: import_scope.clone() },
                 &definition,
             )
         });
diff --git a/crates/completion/src/config.rs b/crates/completion/src/config.rs
index 5175b9d69..30577dc11 100644
--- a/crates/completion/src/config.rs
+++ b/crates/completion/src/config.rs
@@ -4,7 +4,7 @@
 //! module, and we use to statically check that we only produce snippet
 //! completions if we are allowed to.
 
-use ide_db::helpers::insert_use::MergeBehaviour;
+use ide_db::helpers::insert_use::MergeBehavior;
 use rustc_hash::FxHashSet;
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -14,7 +14,7 @@ pub struct CompletionConfig {
     pub add_call_parenthesis: bool,
     pub add_call_argument_snippets: bool,
     pub snippet_cap: Option<SnippetCap>,
-    pub merge: Option<MergeBehaviour>,
+    pub merge: Option<MergeBehavior>,
     /// A set of capabilities, enabled on the client and supported on the server.
     pub active_resolve_capabilities: FxHashSet<CompletionResolveCapability>,
 }
@@ -56,7 +56,7 @@ impl Default for CompletionConfig {
             add_call_parenthesis: true,
             add_call_argument_snippets: true,
             snippet_cap: Some(SnippetCap { _private: () }),
-            merge: Some(MergeBehaviour::Full),
+            merge: Some(MergeBehavior::Full),
             active_resolve_capabilities: FxHashSet::default(),
         }
     }
diff --git a/crates/completion/src/item.rs b/crates/completion/src/item.rs
index bd94402d7..65f8353e7 100644
--- a/crates/completion/src/item.rs
+++ b/crates/completion/src/item.rs
@@ -4,7 +4,7 @@ use std::fmt;
 
 use hir::{Documentation, ModPath, Mutability};
 use ide_db::helpers::{
-    insert_use::{self, ImportScope, MergeBehaviour},
+    insert_use::{self, ImportScope, MergeBehavior},
     mod_path_to_ast,
 };
 use syntax::{algo, TextRange};
@@ -271,19 +271,18 @@ impl CompletionItem {
 pub struct ImportEdit {
     pub import_path: ModPath,
     pub import_scope: ImportScope,
-    pub merge_behaviour: Option<MergeBehaviour>,
 }
 
 impl ImportEdit {
     /// Attempts to insert the import to the given scope, producing a text edit.
     /// May return no edit in edge cases, such as scope already containing the import.
-    pub fn to_text_edit(&self) -> Option<TextEdit> {
+    pub fn to_text_edit(&self, merge_behavior: Option<MergeBehavior>) -> Option<TextEdit> {
        let _p = profile::span("ImportEdit::to_text_edit");
 
        let rewriter = insert_use::insert_use(
            &self.import_scope,
            mod_path_to_ast(&self.import_path),
-           self.merge_behaviour,
+           merge_behavior,
        );
        let old_ast = rewriter.rewrite_root()?;
        let mut import_insert = TextEdit::builder();
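Note: dropping `merge_behaviour` from `ImportEdit` turns it into plain data (path plus scope); the merge policy is now chosen by whoever materializes the edit, which is what lets `resolve_completion_edits` (next hunk) pass the currently configured `config.merge` at LSP resolve time instead of baking it into every completion item. A simplified model of that API shape, with toy types that are not the real rust-analyzer ones:

    #[derive(Clone, Copy, Debug)]
    enum MergeBehavior {
        Full, // the only variant exercised in this diff; others may exist upstream
    }

    /// Toy stand-in: after the change the edit carries no policy of its own.
    struct ImportEdit {
        import_path: String,
    }

    impl ImportEdit {
        /// The caller supplies the policy when the text edit is actually produced.
        fn to_text_edit(&self, merge: Option<MergeBehavior>) -> String {
            match merge {
                Some(MergeBehavior::Full) => {
                    format!("add `{}`, merging it into an existing use tree", self.import_path)
                }
                None => format!("add `use {};` as a standalone item", self.import_path),
            }
        }
    }

    fn main() {
        let edit = ImportEdit { import_path: "std::collections::HashMap".to_string() };
        // Same edit, different result depending on the configuration passed in.
        println!("{}", edit.to_text_edit(Some(MergeBehavior::Full)));
        println!("{}", edit.to_text_edit(None));
    }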
diff --git a/crates/completion/src/lib.rs b/crates/completion/src/lib.rs
index f60f87243..8e27bb153 100644
--- a/crates/completion/src/lib.rs
+++ b/crates/completion/src/lib.rs
@@ -153,9 +153,7 @@ pub fn resolve_completion_edits(
         })
         .find(|mod_path| mod_path.to_string() == full_import_path)?;
 
-    ImportEdit { import_path, import_scope, merge_behaviour: config.merge }
-        .to_text_edit()
-        .map(|edit| vec![edit])
+    ImportEdit { import_path, import_scope }.to_text_edit(config.merge).map(|edit| vec![edit])
 }
 
 #[cfg(test)]
diff --git a/crates/completion/src/test_utils.rs b/crates/completion/src/test_utils.rs
index 25f5f4924..db896b2df 100644
--- a/crates/completion/src/test_utils.rs
+++ b/crates/completion/src/test_utils.rs
@@ -98,7 +98,8 @@ pub(crate) fn check_edit_with_config(
     let mut actual = db.file_text(position.file_id).to_string();
 
     let mut combined_edit = completion.text_edit().to_owned();
-    if let Some(import_text_edit) = completion.import_to_add().and_then(|edit| edit.to_text_edit())
+    if let Some(import_text_edit) =
+        completion.import_to_add().and_then(|edit| edit.to_text_edit(config.merge))
     {
         combined_edit.union(import_text_edit).expect(
             "Failed to apply completion resolve changes: change ranges overlap, but should not",
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index c7c7377d7..302a52491 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -39,7 +39,7 @@ pub use crate::{
         Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
     },
     has_source::HasSource,
-    semantics::{original_range, PathResolution, Semantics, SemanticsScope},
+    semantics::{PathResolution, Semantics, SemanticsScope},
 };
 
 pub use hir_def::{
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index c61a430e1..4315ad48b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -13,10 +13,7 @@ use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{
-    algo::{find_node_at_offset, skip_trivia_token},
-    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
-};
+use syntax::{algo::find_node_at_offset, ast, AstNode, SyntaxNode, SyntaxToken, TextSize};
 
 use crate::{
     code_model::Access,
@@ -25,7 +22,7 @@ use crate::{
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, SourceAnalyzer},
     AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
-    Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
+    Module, ModuleDef, Name, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -372,7 +369,7 @@ impl<'db> SemanticsImpl<'db> {
 
     fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node.clone());
-        original_range(self.db, node.as_ref())
+        node.as_ref().original_file_range(self.db.upcast())
     }
 
     fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
@@ -380,7 +377,7 @@ impl<'db> SemanticsImpl<'db> {
         let root = self.db.parse_or_expand(src.file_id).unwrap();
         let node = src.value.to_node(&root);
         self.cache(root, src.file_id);
-        original_range(self.db, src.with_value(&node))
+        src.with_value(&node).original_file_range(self.db.upcast())
     }
 
     fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
@@ -771,68 +768,3 @@ impl<'a> SemanticsScope<'a> {
         resolve_hir_path(self.db, &self.resolver, &path)
     }
 }
-
-// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
-pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
-    if let Some(range) = original_range_opt(db, node) {
-        let original_file = range.file_id.original_file(db.upcast());
-        if range.file_id == original_file.into() {
-            return FileRange { file_id: original_file, range: range.value };
-        }
-
-        log::error!("Fail to mapping up more for {:?}", range);
-        return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value };
-    }
-
-    // Fall back to whole macro call
-    if let Some(expansion) = node.file_id.expansion_info(db.upcast()) {
-        if let Some(call_node) = expansion.call_node() {
-            return FileRange {
-                file_id: call_node.file_id.original_file(db.upcast()),
-                range: call_node.value.text_range(),
-            };
-        }
-    }
-
-    FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() }
-}
-
-fn original_range_opt(
-    db: &dyn HirDatabase,
-    node: InFile<&SyntaxNode>,
-) -> Option<InFile<TextRange>> {
-    let expansion = node.file_id.expansion_info(db.upcast())?;
-
-    // the input node has only one token ?
-    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
-        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
-
-    Some(node.value.descendants().find_map(|it| {
-        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
-        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
-
-        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
-        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
-
-        if (!single && first == last) || (first.file_id != last.file_id) {
-            return None;
-        }
-
-        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
-    })?)
-}
-
-fn ascend_call_token(
-    db: &dyn HirDatabase,
-    expansion: &ExpansionInfo,
-    token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
-    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
-    if origin != Origin::Call {
-        return None;
-    }
-    if let Some(info) = mapped.file_id.expansion_info(db.upcast()) {
-        return ascend_call_token(db, &info, mapped);
-    }
-    Some(mapped)
-}
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index 12f4b02e2..228d706db 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -9,7 +9,7 @@ use itertools::Itertools;
 use mbe::ast_to_token_tree;
 use syntax::{
     ast::{self, AstNode, AttrsOwner},
-    AstToken, SmolStr,
+    match_ast, AstToken, SmolStr, SyntaxNode,
 };
 use tt::Subtree;
 
@@ -110,7 +110,17 @@ impl Attrs {
     }
 
     pub(crate) fn new(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Attrs {
-        let docs = ast::CommentIter::from_syntax_node(owner.syntax()).map(|docs_text| {
+        let (inner_attrs, inner_docs) = inner_attributes(owner.syntax())
+            .map_or((None, None), |(attrs, docs)| ((Some(attrs), Some(docs))));
+
+        let outer_attrs = owner.attrs().filter(|attr| attr.excl_token().is_none());
+        let attrs = outer_attrs
+            .chain(inner_attrs.into_iter().flatten())
+            .map(|attr| (attr.syntax().text_range().start(), Attr::from_src(attr, hygiene)));
+
+        let outer_docs =
+            ast::CommentIter::from_syntax_node(owner.syntax()).filter(ast::Comment::is_outer);
+        let docs = outer_docs.chain(inner_docs.into_iter().flatten()).map(|docs_text| {
             (
                 docs_text.syntax().text_range().start(),
                 docs_text.doc_comment().map(|doc| Attr {
@@ -119,9 +129,6 @@ impl Attrs {
                 }),
             )
         });
-        let attrs = owner
-            .attrs()
-            .map(|attr| (attr.syntax().text_range().start(), Attr::from_src(attr, hygiene)));
         // sort here by syntax node offset because the source can have doc attributes and doc strings be interleaved
         let attrs: Vec<_> = docs.chain(attrs).sorted_by_key(|&(offset, _)| offset).collect();
         let entries = if attrs.is_empty() {
@@ -184,6 +191,41 @@ impl Attrs {
     }
 }
 
+fn inner_attributes(
+    syntax: &SyntaxNode,
+) -> Option<(impl Iterator<Item = ast::Attr>, impl Iterator<Item = ast::Comment>)> {
+    let (attrs, docs) = match_ast! {
+        match syntax {
+            ast::SourceFile(it) => (it.attrs(), ast::CommentIter::from_syntax_node(it.syntax())),
+            ast::ExternBlock(it) => {
+                let extern_item_list = it.extern_item_list()?;
+                (extern_item_list.attrs(), ast::CommentIter::from_syntax_node(extern_item_list.syntax()))
+            },
+            ast::Fn(it) => {
+                let body = it.body()?;
+                (body.attrs(), ast::CommentIter::from_syntax_node(body.syntax()))
+            },
+            ast::Impl(it) => {
+                let assoc_item_list = it.assoc_item_list()?;
+                (assoc_item_list.attrs(), ast::CommentIter::from_syntax_node(assoc_item_list.syntax()))
+            },
+            ast::Module(it) => {
+                let item_list = it.item_list()?;
+                (item_list.attrs(), ast::CommentIter::from_syntax_node(item_list.syntax()))
+            },
+            // FIXME: BlockExpr's only accept inner attributes in specific cases
+            // Excerpt from the reference:
+            // Block expressions accept outer and inner attributes, but only when they are the outer
+            // expression of an expression statement or the final expression of another block expression.
+            ast::BlockExpr(it) => return None,
+            _ => return None,
+        }
+    };
+    let attrs = attrs.filter(|attr| attr.excl_token().is_some());
+    let docs = docs.filter(|doc| doc.is_inner());
+    Some((attrs, docs))
+}
+
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Attr {
     pub(crate) path: ModPath,
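Note: for context on what the new `inner_attributes` helper gathers, inner attributes and inner doc comments (`#![...]`, `//! ...`) are written inside the item they describe, while outer ones (`#[...]`, `/// ...`) precede it. Plain Rust showing both forms on a file and on a function body, two of the node kinds matched above:

    //! Inner doc comment: documents the enclosing file (crate root or module).
    #![allow(dead_code)] // inner attribute: also applies to the enclosing file

    /// Outer doc comment: documents the item that follows.
    #[derive(Debug)] // outer attribute
    struct S;

    fn f() {
        // An inner attribute at the top of the body applies to `f` itself;
        // this is what the `ast::Fn` arm of `inner_attributes` collects.
        #![allow(unused_variables)]
        let x = 1;
    }

    fn main() {
        println!("{:?}", S);
        f();
    }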
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs
index 92bcc1705..c5d6f5fb0 100644
--- a/crates/hir_def/src/body.rs
+++ b/crates/hir_def/src/body.rs
@@ -103,8 +103,7 @@ impl Expander {
         local_scope: Option<&ItemScope>,
         macro_call: ast::MacroCall,
     ) -> ExpandResult<Option<(Mark, T)>> {
-        self.recursion_limit += 1;
-        if self.recursion_limit > EXPANSION_RECURSION_LIMIT {
+        if self.recursion_limit + 1 > EXPANSION_RECURSION_LIMIT {
             mark::hit!(your_stack_belongs_to_me);
             return ExpandResult::str_err("reached recursion limit during macro expansion".into());
         }
@@ -165,6 +164,7 @@
 
         log::debug!("macro expansion {:#?}", node.syntax());
 
+        self.recursion_limit += 1;
         let mark = Mark {
             file_id: self.current_file_id,
             ast_id_map: mem::take(&mut self.ast_id_map),
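Note: the reordering above fixes a budget leak: previously the counter was bumped before the limit check and before expansion could still fail, so every unsuccessful expansion permanently consumed recursion budget. The new code checks `recursion_limit + 1` up front and only increments once the expansion has produced a node. A minimal sketch of the pattern (toy types, illustrative limit value, not the real `Expander`):

    /// Illustrative value; the real EXPANSION_RECURSION_LIMIT lives in hir_def.
    const EXPANSION_RECURSION_LIMIT: usize = 64;

    /// Toy model of the change: check the would-be depth first, and only
    /// commit (increment) once the expansion has actually succeeded.
    struct Expander {
        recursion_limit: usize,
    }

    impl Expander {
        fn enter_expand(&mut self, expansion_succeeds: bool) -> Option<usize> {
            if self.recursion_limit + 1 > EXPANSION_RECURSION_LIMIT {
                return None; // "reached recursion limit during macro expansion"
            }
            if !expansion_succeeds {
                // Previously the counter was bumped before this point, so a
                // failed expansion permanently ate into the budget.
                return None;
            }
            self.recursion_limit += 1;
            Some(self.recursion_limit)
        }
    }

    fn main() {
        let mut expander = Expander { recursion_limit: 0 };
        assert_eq!(expander.enter_expand(false), None);
        assert_eq!(expander.recursion_limit, 0); // failure left the budget untouched
        assert_eq!(expander.enter_expand(true), Some(1));
    }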
diff --git a/crates/hir_def/src/body/tests.rs b/crates/hir_def/src/body/tests.rs
index 6dba9817d..de77d5fc9 100644
--- a/crates/hir_def/src/body/tests.rs
+++ b/crates/hir_def/src/body/tests.rs
@@ -134,3 +134,31 @@ fn f() {
 "#,
     );
 }
+
+#[test]
+fn dollar_crate_in_builtin_macro() {
+    check_diagnostics(
+        r#"
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! format_args {}
+
+#[macro_export]
+macro_rules! arg {
+    () => {}
+}
+
+#[macro_export]
+macro_rules! outer {
+    () => {
+        $crate::format_args!( "", $crate::arg!(1) )
+    };
+}
+
+fn f() {
+    outer!();
+  //^^^^^^^^ leftover tokens
+}
+"#,
+    )
+}
diff --git a/crates/hir_def/src/data.rs b/crates/hir_def/src/data.rs
index 146045938..dd3a906af 100644
--- a/crates/hir_def/src/data.rs
+++ b/crates/hir_def/src/data.rs
@@ -28,6 +28,7 @@ pub struct FunctionData {
     pub has_body: bool,
     pub is_unsafe: bool,
     pub is_varargs: bool,
+    pub is_extern: bool,
     pub visibility: RawVisibility,
 }
 
@@ -46,6 +47,7 @@ impl FunctionData {
             has_body: func.has_body,
             is_unsafe: func.is_unsafe,
             is_varargs: func.is_varargs,
+            is_extern: func.is_extern,
             visibility: item_tree[func.visibility].clone(),
         })
     }
@@ -191,6 +193,7 @@ pub struct StaticData {
     pub type_ref: TypeRef,
     pub visibility: RawVisibility,
     pub mutable: bool,
+    pub is_extern: bool,
 }
 
 impl StaticData {
@@ -204,6 +207,7 @@ impl StaticData {
             type_ref: statik.type_ref.clone(),
             visibility: item_tree[statik.visibility].clone(),
             mutable: statik.mutable,
+            is_extern: statik.is_extern,
         })
     }
 }
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index 7eb388bae..c017b352d 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -1,8 +1,6 @@
 //! A simplified AST that only contains items.
 
 mod lower;
-#[cfg(test)]
-mod tests;
 
 use std::{
     any::type_name,
@@ -507,6 +505,9 @@ pub struct Function {
     pub has_self_param: bool,
     pub has_body: bool,
     pub is_unsafe: bool,
+    /// Whether the function is located in an `extern` block (*not* whether it is an
+    /// `extern "abi" fn`).
+    pub is_extern: bool,
     pub params: Box<[TypeRef]>,
     pub is_varargs: bool,
     pub ret_type: TypeRef,
@@ -565,6 +566,8 @@ pub struct Static {
     pub name: Name,
     pub visibility: RawVisibilityId,
     pub mutable: bool,
+    /// Whether the static is in an `extern` block.
+    pub is_extern: bool,
     pub type_ref: TypeRef,
     pub ast_id: FileAstId<ast::Static>,
 }
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs
index ca7fb4a43..63b2826f8 100644
--- a/crates/hir_def/src/item_tree/lower.rs
+++ b/crates/hir_def/src/item_tree/lower.rs
@@ -340,6 +340,7 @@ impl Ctx {
             has_self_param,
             has_body,
             is_unsafe: func.unsafe_token().is_some(),
+            is_extern: false,
             params: params.into_boxed_slice(),
             is_varargs,
             ret_type,
@@ -378,7 +379,7 @@ impl Ctx {
         let visibility = self.lower_visibility(static_);
         let mutable = static_.mut_token().is_some();
         let ast_id = self.source_ast_id_map.ast_id(static_);
-        let res = Static { name, visibility, mutable, type_ref, ast_id };
+        let res = Static { name, visibility, mutable, type_ref, ast_id, is_extern: false };
         Some(id(self.data().statics.alloc(res)))
     }
 
@@ -554,13 +555,15 @@ impl Ctx {
         let attrs = Attrs::new(&item, &self.hygiene);
         let id: ModItem = match item {
             ast::ExternItem::Fn(ast) => {
-                let func = self.lower_function(&ast)?;
-                self.data().functions[func.index].is_unsafe =
-                    is_intrinsic_fn_unsafe(&self.data().functions[func.index].name);
-                func.into()
+                let func_id = self.lower_function(&ast)?;
+                let func = &mut self.data().functions[func_id.index];
+                func.is_unsafe = is_intrinsic_fn_unsafe(&func.name);
+                func.is_extern = true;
+                func_id.into()
             }
             ast::ExternItem::Static(ast) => {
                 let statik = self.lower_static(&ast)?;
+                self.data().statics[statik.index].is_extern = true;
                 statik.into()
             }
             ast::ExternItem::TypeAlias(ty) => {
diff --git a/crates/hir_def/src/item_tree/tests.rs b/crates/hir_def/src/item_tree/tests.rs
deleted file mode 100644
index 4b354c4c1..000000000
--- a/crates/hir_def/src/item_tree/tests.rs
+++ /dev/null
@@ -1,439 +0,0 @@
1 | use base_db::fixture::WithFixture; | ||
2 | use expect_test::{expect, Expect}; | ||
3 | use hir_expand::{db::AstDatabase, HirFileId, InFile}; | ||
4 | use rustc_hash::FxHashSet; | ||
5 | use std::sync::Arc; | ||
6 | use stdx::format_to; | ||
7 | use syntax::{ast, AstNode}; | ||
8 | |||
9 | use crate::{db::DefDatabase, test_db::TestDB}; | ||
10 | |||
11 | use super::{ItemTree, ModItem, ModKind}; | ||
12 | |||
13 | fn test_inner_items(ra_fixture: &str) { | ||
14 | let (db, file_id) = TestDB::with_single_file(ra_fixture); | ||
15 | let file_id = HirFileId::from(file_id); | ||
16 | let tree = db.item_tree(file_id); | ||
17 | let root = db.parse_or_expand(file_id).unwrap(); | ||
18 | let ast_id_map = db.ast_id_map(file_id); | ||
19 | |||
20 | // Traverse the item tree and collect all module/impl/trait-level items as AST nodes. | ||
21 | let mut outer_items = FxHashSet::default(); | ||
22 | let mut worklist = tree.top_level_items().to_vec(); | ||
23 | while let Some(item) = worklist.pop() { | ||
24 | let node: ast::Item = match item { | ||
25 | ModItem::Import(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
26 | ModItem::ExternCrate(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
27 | ModItem::Function(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
28 | ModItem::Struct(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
29 | ModItem::Union(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
30 | ModItem::Enum(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
31 | ModItem::Const(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
32 | ModItem::Static(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
33 | ModItem::TypeAlias(it) => tree.source(&db, InFile::new(file_id, it)).into(), | ||
34 | ModItem::Mod(it) => { | ||
35 | if let ModKind::Inline { items } = &tree[it].kind { | ||
36 | worklist.extend(&**items); | ||
37 | } | ||
38 | tree.source(&db, InFile::new(file_id, it)).into() | ||
39 | } | ||
40 | ModItem::Trait(it) => { | ||
41 | worklist.extend(tree[it].items.iter().map(|item| ModItem::from(*item))); | ||
42 | tree.source(&db, InFile::new(file_id, it)).into() | ||
43 | } | ||
44 | ModItem::Impl(it) => { | ||
45 | worklist.extend(tree[it].items.iter().map(|item| ModItem::from(*item))); | ||
46 | tree.source(&db, InFile::new(file_id, it)).into() | ||
47 | } | ||
48 | ModItem::MacroCall(_) => continue, | ||
49 | }; | ||
50 | |||
51 | outer_items.insert(node); | ||
52 | } | ||
53 | |||
54 | // Now descend the root node and check that all `ast::ModuleItem`s are either recorded above, or | ||
55 | // registered as inner items. | ||
56 | for item in root.descendants().skip(1).filter_map(ast::Item::cast) { | ||
57 | if outer_items.contains(&item) { | ||
58 | continue; | ||
59 | } | ||
60 | |||
61 | let ast_id = ast_id_map.ast_id(&item); | ||
62 | assert!(!tree.inner_items(ast_id).is_empty()); | ||
63 | } | ||
64 | } | ||
65 | |||
66 | fn item_tree(ra_fixture: &str) -> Arc<ItemTree> { | ||
67 | let (db, file_id) = TestDB::with_single_file(ra_fixture); | ||
68 | db.item_tree(file_id.into()) | ||
69 | } | ||
70 | |||
71 | fn print_item_tree(ra_fixture: &str) -> String { | ||
72 | let tree = item_tree(ra_fixture); | ||
73 | let mut out = String::new(); | ||
74 | |||
75 | format_to!(out, "inner attrs: {:?}\n\n", tree.top_level_attrs()); | ||
76 | format_to!(out, "top-level items:\n"); | ||
77 | for item in tree.top_level_items() { | ||
78 | fmt_mod_item(&mut out, &tree, *item); | ||
79 | format_to!(out, "\n"); | ||
80 | } | ||
81 | |||
82 | if !tree.inner_items.is_empty() { | ||
83 | format_to!(out, "\ninner items:\n\n"); | ||
84 | for (ast_id, items) in &tree.inner_items { | ||
85 | format_to!(out, "for AST {:?}:\n", ast_id); | ||
86 | for inner in items { | ||
87 | fmt_mod_item(&mut out, &tree, *inner); | ||
88 | format_to!(out, "\n\n"); | ||
89 | } | ||
90 | } | ||
91 | } | ||
92 | |||
93 | out | ||
94 | } | ||
95 | |||
96 | fn fmt_mod_item(out: &mut String, tree: &ItemTree, item: ModItem) { | ||
97 | let attrs = tree.attrs(item.into()); | ||
98 | if !attrs.is_empty() { | ||
99 | format_to!(out, "#[{:?}]\n", attrs); | ||
100 | } | ||
101 | |||
102 | let mut children = String::new(); | ||
103 | match item { | ||
104 | ModItem::ExternCrate(it) => { | ||
105 | format_to!(out, "{:?}", tree[it]); | ||
106 | } | ||
107 | ModItem::Import(it) => { | ||
108 | format_to!(out, "{:?}", tree[it]); | ||
109 | } | ||
110 | ModItem::Function(it) => { | ||
111 | format_to!(out, "{:?}", tree[it]); | ||
112 | } | ||
113 | ModItem::Struct(it) => { | ||
114 | format_to!(out, "{:?}", tree[it]); | ||
115 | } | ||
116 | ModItem::Union(it) => { | ||
117 | format_to!(out, "{:?}", tree[it]); | ||
118 | } | ||
119 | ModItem::Enum(it) => { | ||
120 | format_to!(out, "{:?}", tree[it]); | ||
121 | } | ||
122 | ModItem::Const(it) => { | ||
123 | format_to!(out, "{:?}", tree[it]); | ||
124 | } | ||
125 | ModItem::Static(it) => { | ||
126 | format_to!(out, "{:?}", tree[it]); | ||
127 | } | ||
128 | ModItem::Trait(it) => { | ||
129 | format_to!(out, "{:?}", tree[it]); | ||
130 | for item in &*tree[it].items { | ||
131 | fmt_mod_item(&mut children, tree, ModItem::from(*item)); | ||
132 | format_to!(children, "\n"); | ||
133 | } | ||
134 | } | ||
135 | ModItem::Impl(it) => { | ||
136 | format_to!(out, "{:?}", tree[it]); | ||
137 | for item in &*tree[it].items { | ||
138 | fmt_mod_item(&mut children, tree, ModItem::from(*item)); | ||
139 | format_to!(children, "\n"); | ||
140 | } | ||
141 | } | ||
142 | ModItem::TypeAlias(it) => { | ||
143 | format_to!(out, "{:?}", tree[it]); | ||
144 | } | ||
145 | ModItem::Mod(it) => { | ||
146 | format_to!(out, "{:?}", tree[it]); | ||
147 | match &tree[it].kind { | ||
148 | ModKind::Inline { items } => { | ||
149 | for item in &**items { | ||
150 | fmt_mod_item(&mut children, tree, *item); | ||
151 | format_to!(children, "\n"); | ||
152 | } | ||
153 | } | ||
154 | ModKind::Outline {} => {} | ||
155 | } | ||
156 | } | ||
157 | ModItem::MacroCall(it) => { | ||
158 | format_to!(out, "{:?}", tree[it]); | ||
159 | } | ||
160 | } | ||
161 | |||
162 | for line in children.lines() { | ||
163 | format_to!(out, "\n> {}", line); | ||
164 | } | ||
165 | } | ||
166 | |||
167 | fn check(ra_fixture: &str, expect: Expect) { | ||
168 | let actual = print_item_tree(ra_fixture); | ||
169 | expect.assert_eq(&actual); | ||
170 | } | ||
171 | |||
172 | #[test] | ||
173 | fn smoke() { | ||
174 | check( | ||
175 | r" | ||
176 | #![attr] | ||
177 | |||
178 | #[attr_on_use] | ||
179 | use {a, b::*}; | ||
180 | |||
181 | #[ext_crate] | ||
182 | extern crate krate; | ||
183 | |||
184 | #[on_trait] | ||
185 | trait Tr<U> { | ||
186 | #[assoc_ty] | ||
187 | type AssocTy: Tr<()>; | ||
188 | |||
189 | #[assoc_const] | ||
190 | const CONST: u8; | ||
191 | |||
192 | #[assoc_method] | ||
193 | fn method(&self); | ||
194 | |||
195 | #[assoc_dfl_method] | ||
196 | fn dfl_method(&mut self) {} | ||
197 | } | ||
198 | |||
199 | #[struct0] | ||
200 | struct Struct0<T = ()>; | ||
201 | |||
202 | #[struct1] | ||
203 | struct Struct1<T>(#[struct1fld] u8); | ||
204 | |||
205 | #[struct2] | ||
206 | struct Struct2<T> { | ||
207 | #[struct2fld] | ||
208 | fld: (T, ), | ||
209 | } | ||
210 | |||
211 | #[en] | ||
212 | enum En { | ||
213 | #[enum_variant] | ||
214 | Variant { | ||
215 | #[enum_field] | ||
216 | field: u8, | ||
217 | }, | ||
218 | } | ||
219 | |||
220 | #[un] | ||
221 | union Un { | ||
222 | #[union_fld] | ||
223 | fld: u16, | ||
224 | } | ||
225 | ", | ||
226 | expect![[r##" | ||
227 | inner attrs: Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr"))] }, input: None }]) } | ||
228 | |||
229 | top-level items: | ||
230 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }] | ||
231 | Import { path: ModPath { kind: Plain, segments: [Name(Text("a"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: false, is_prelude: false, ast_id: FileAstId::<syntax::ast::generated::nodes::Use>(0), index: 0 } | ||
232 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_on_use"))] }, input: None }]) }] | ||
233 | Import { path: ModPath { kind: Plain, segments: [Name(Text("b"))] }, alias: None, visibility: RawVisibilityId("pub(self)"), is_glob: true, is_prelude: false, ast_id: FileAstId::<syntax::ast::generated::nodes::Use>(0), index: 1 } | ||
234 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("ext_crate"))] }, input: None }]) }] | ||
235 | ExternCrate { name: Name(Text("krate")), alias: None, visibility: RawVisibilityId("pub(self)"), is_macro_use: false, ast_id: FileAstId::<syntax::ast::generated::nodes::ExternCrate>(1) } | ||
236 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_trait"))] }, input: None }]) }] | ||
237 | Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [TypeAlias(Idx::<TypeAlias>(0)), Const(Idx::<Const>(0)), Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Trait>(2) } | ||
238 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_ty"))] }, input: None }]) }] | ||
239 | > TypeAlias { name: Name(Text("AssocTy")), visibility: RawVisibilityId("pub(self)"), bounds: [Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Tr"))] }, generic_args: [Some(GenericArgs { args: [Type(Tuple([]))], has_self_type: false, bindings: [] })] })], generic_params: GenericParamsId(4294967295), type_ref: None, is_extern: false, ast_id: FileAstId::<syntax::ast::generated::nodes::TypeAlias>(8) } | ||
240 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_const"))] }, input: None }]) }] | ||
241 | > Const { name: Some(Name(Text("CONST"))), visibility: RawVisibilityId("pub(self)"), type_ref: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("u8"))] }, generic_args: [None] }), ast_id: FileAstId::<syntax::ast::generated::nodes::Const>(9) } | ||
242 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_method"))] }, input: None }]) }] | ||
243 | > Function { name: Name(Text("method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, has_body: false, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Shared)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(10) } | ||
244 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("assoc_dfl_method"))] }, input: None }]) }] | ||
245 | > Function { name: Name(Text("dfl_method")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: true, has_body: true, is_unsafe: false, params: [Reference(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Self"))] }, generic_args: [None] }), Mut)], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(11) } | ||
246 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct0"))] }, input: None }]) }] | ||
247 | Struct { name: Name(Text("Struct0")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), fields: Unit, ast_id: FileAstId::<syntax::ast::generated::nodes::Struct>(3), kind: Unit } | ||
248 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct1"))] }, input: None }]) }] | ||
249 | Struct { name: Name(Text("Struct1")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(2), fields: Tuple(IdRange::<hir_def::item_tree::Field>(0..1)), ast_id: FileAstId::<syntax::ast::generated::nodes::Struct>(4), kind: Tuple } | ||
250 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("struct2"))] }, input: None }]) }] | ||
251 | Struct { name: Name(Text("Struct2")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(3), fields: Record(IdRange::<hir_def::item_tree::Field>(1..2)), ast_id: FileAstId::<syntax::ast::generated::nodes::Struct>(5), kind: Record } | ||
252 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("en"))] }, input: None }]) }] | ||
253 | Enum { name: Name(Text("En")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), variants: IdRange::<hir_def::item_tree::Variant>(0..1), ast_id: FileAstId::<syntax::ast::generated::nodes::Enum>(6) } | ||
254 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("un"))] }, input: None }]) }] | ||
255 | Union { name: Name(Text("Un")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), fields: Record(IdRange::<hir_def::item_tree::Field>(3..4)), ast_id: FileAstId::<syntax::ast::generated::nodes::Union>(7) } | ||
256 | "##]], | ||
257 | ); | ||
258 | } | ||
259 | |||
260 | #[test] | ||
261 | fn simple_inner_items() { | ||
262 | check( | ||
263 | r" | ||
264 | impl<T:A> D for Response<T> { | ||
265 | fn foo() { | ||
266 | end(); | ||
267 | fn end<W: Write>() { | ||
268 | let _x: T = loop {}; | ||
269 | } | ||
270 | } | ||
271 | } | ||
272 | ", | ||
273 | expect![[r#" | ||
274 | inner attrs: Attrs { entries: None } | ||
275 | |||
276 | top-level items: | ||
277 | Impl { generic_params: GenericParamsId(0), target_trait: Some(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("D"))] }, generic_args: [None] })), target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Response"))] }, generic_args: [Some(GenericArgs { args: [Type(Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("T"))] }, generic_args: [None] }))], has_self_type: false, bindings: [] })] }), is_negative: false, items: [Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Impl>(0) } | ||
278 | > Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) } | ||
279 | |||
280 | inner items: | ||
281 | |||
282 | for AST FileAstId::<syntax::ast::generated::nodes::Item>(2): | ||
283 | Function { name: Name(Text("end")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(1), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) } | ||
284 | |||
285 | "#]], | ||
286 | ); | ||
287 | } | ||
288 | |||
289 | #[test] | ||
290 | fn extern_attrs() { | ||
291 | check( | ||
292 | r#" | ||
293 | #[block_attr] | ||
294 | extern "C" { | ||
295 | #[attr_a] | ||
296 | fn a() {} | ||
297 | #[attr_b] | ||
298 | fn b() {} | ||
299 | } | ||
300 | "#, | ||
301 | expect![[r##" | ||
302 | inner attrs: Attrs { entries: None } | ||
303 | |||
304 | top-level items: | ||
305 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }] | ||
306 | Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) } | ||
307 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }, Attr { path: ModPath { kind: Plain, segments: [Name(Text("block_attr"))] }, input: None }]) }] | ||
308 | Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: true, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) } | ||
309 | "##]], | ||
310 | ); | ||
311 | } | ||
312 | |||
313 | #[test] | ||
314 | fn trait_attrs() { | ||
315 | check( | ||
316 | r#" | ||
317 | #[trait_attr] | ||
318 | trait Tr { | ||
319 | #[attr_a] | ||
320 | fn a() {} | ||
321 | #[attr_b] | ||
322 | fn b() {} | ||
323 | } | ||
324 | "#, | ||
325 | expect![[r##" | ||
326 | inner attrs: Attrs { entries: None } | ||
327 | |||
328 | top-level items: | ||
329 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("trait_attr"))] }, input: None }]) }] | ||
330 | Trait { name: Name(Text("Tr")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(0), auto: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Trait>(0) } | ||
331 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }] | ||
332 | > Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) } | ||
333 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }] | ||
334 | > Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) } | ||
335 | "##]], | ||
336 | ); | ||
337 | } | ||
338 | |||
339 | #[test] | ||
340 | fn impl_attrs() { | ||
341 | check( | ||
342 | r#" | ||
343 | #[impl_attr] | ||
344 | impl Ty { | ||
345 | #[attr_a] | ||
346 | fn a() {} | ||
347 | #[attr_b] | ||
348 | fn b() {} | ||
349 | } | ||
350 | "#, | ||
351 | expect![[r##" | ||
352 | inner attrs: Attrs { entries: None } | ||
353 | |||
354 | top-level items: | ||
355 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("impl_attr"))] }, input: None }]) }] | ||
356 | Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("Ty"))] }, generic_args: [None] }), is_negative: false, items: [Function(Idx::<Function>(0)), Function(Idx::<Function>(1))], ast_id: FileAstId::<syntax::ast::generated::nodes::Impl>(0) } | ||
357 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_a"))] }, input: None }]) }] | ||
358 | > Function { name: Name(Text("a")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) } | ||
359 | > #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("attr_b"))] }, input: None }]) }] | ||
360 | > Function { name: Name(Text("b")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(2) } | ||
361 | "##]], | ||
362 | ); | ||
363 | } | ||
364 | |||
365 | #[test] | ||
366 | fn cursed_inner_items() { | ||
367 | test_inner_items( | ||
368 | r" | ||
369 | struct S<T: Trait = [u8; { fn f() {} 0 }]>(T); | ||
370 | |||
371 | enum En { | ||
372 | Var1 { | ||
373 | t: [(); { trait Inner {} 0 }], | ||
374 | }, | ||
375 | |||
376 | Var2([u16; { enum Inner {} 0 }]), | ||
377 | } | ||
378 | |||
379 | type Ty = [En; { struct Inner; 0 }]; | ||
380 | |||
381 | impl En { | ||
382 | fn assoc() { | ||
383 | trait InnerTrait<T = [u8; { fn f() {} }]> {} | ||
384 | struct InnerStruct<T = [u8; { fn f() {} }]> {} | ||
385 | impl<T = [u8; { fn f() {} }]> InnerTrait for InnerStruct {} | ||
386 | } | ||
387 | } | ||
388 | |||
389 | trait Tr<T = [u8; { fn f() {} }]> { | ||
390 | type AssocTy = [u8; { fn f() {} }]; | ||
391 | |||
392 | const AssocConst: [u8; { fn f() {} }]; | ||
393 | } | ||
394 | ", | ||
395 | ); | ||
396 | } | ||
397 | |||
398 | #[test] | ||
399 | fn inner_item_attrs() { | ||
400 | check( | ||
401 | r" | ||
402 | fn foo() { | ||
403 | #[on_inner] | ||
404 | fn inner() {} | ||
405 | } | ||
406 | ", | ||
407 | expect![[r##" | ||
408 | inner attrs: Attrs { entries: None } | ||
409 | |||
410 | top-level items: | ||
411 | Function { name: Name(Text("foo")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(0) } | ||
412 | |||
413 | inner items: | ||
414 | |||
415 | for AST FileAstId::<syntax::ast::generated::nodes::Item>(1): | ||
416 | #[Attrs { entries: Some([Attr { path: ModPath { kind: Plain, segments: [Name(Text("on_inner"))] }, input: None }]) }] | ||
417 | Function { name: Name(Text("inner")), visibility: RawVisibilityId("pub(self)"), generic_params: GenericParamsId(4294967295), has_self_param: false, has_body: true, is_unsafe: false, params: [], is_varargs: false, ret_type: Tuple([]), ast_id: FileAstId::<syntax::ast::generated::nodes::Fn>(1) } | ||
418 | |||
419 | "##]], | ||
420 | ); | ||
421 | } | ||
422 | |||
423 | #[test] | ||
424 | fn assoc_item_macros() { | ||
425 | check( | ||
426 | r" | ||
427 | impl S { | ||
428 | items!(); | ||
429 | } | ||
430 | ", | ||
431 | expect![[r#" | ||
432 | inner attrs: Attrs { entries: None } | ||
433 | |||
434 | top-level items: | ||
435 | Impl { generic_params: GenericParamsId(4294967295), target_trait: None, target_type: Path(Path { type_anchor: None, mod_path: ModPath { kind: Plain, segments: [Name(Text("S"))] }, generic_args: [None] }), is_negative: false, items: [MacroCall(Idx::<MacroCall>(0))], ast_id: FileAstId::<syntax::ast::generated::nodes::Impl>(0) } | ||
436 | > MacroCall { name: None, path: ModPath { kind: Plain, segments: [Name(Text("items"))] }, is_export: false, is_local_inner: false, is_builtin: false, ast_id: FileAstId::<syntax::ast::generated::nodes::MacroCall>(1) } | ||
437 | "#]], | ||
438 | ); | ||
439 | } | ||
diff --git a/crates/hir_def/src/nameres/mod_resolution.rs b/crates/hir_def/src/nameres/mod_resolution.rs index c0c789cae..b4ccd4488 100644 --- a/crates/hir_def/src/nameres/mod_resolution.rs +++ b/crates/hir_def/src/nameres/mod_resolution.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | //! This module resolves `mod foo;` declaration to file. | 1 | //! This module resolves `mod foo;` declaration to file. |
2 | use base_db::FileId; | 2 | use base_db::{AnchoredPath, FileId}; |
3 | use hir_expand::name::Name; | 3 | use hir_expand::name::Name; |
4 | use syntax::SmolStr; | 4 | use syntax::SmolStr; |
5 | use test_utils::mark; | 5 | use test_utils::mark; |
@@ -77,7 +77,8 @@ impl ModDir { | |||
77 | }; | 77 | }; |
78 | 78 | ||
79 | for candidate in candidate_files.iter() { | 79 | for candidate in candidate_files.iter() { |
80 | if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) { | 80 | let path = AnchoredPath { anchor: file_id, path: candidate.as_str() }; |
81 | if let Some(file_id) = db.resolve_path(path) { | ||
81 | let is_mod_rs = candidate.ends_with("mod.rs"); | 82 | let is_mod_rs = candidate.ends_with("mod.rs"); |
82 | 83 | ||
83 | let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { | 84 | let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { |
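The hunk above replaces the `(anchor, path)` argument pair with the new `AnchoredPath` struct when resolving a `mod foo;` candidate file. A minimal sketch of the call shape, assuming a database implementing the `FileLoader` trait shown in the `TestDB` hunks below (variable names are illustrative, not part of this patch):

    let path = AnchoredPath { anchor: file_id, path: "foo.rs" };
    if let Some(target) = db.resolve_path(path) {
        // `target` is the FileId of the candidate module file,
        // resolved relative to the file containing the `mod` declaration.
    }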
diff --git a/crates/hir_def/src/test_db.rs b/crates/hir_def/src/test_db.rs index 00fe711fe..574c0201a 100644 --- a/crates/hir_def/src/test_db.rs +++ b/crates/hir_def/src/test_db.rs | |||
@@ -5,8 +5,8 @@ use std::{ | |||
5 | sync::{Arc, Mutex}, | 5 | sync::{Arc, Mutex}, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use base_db::SourceDatabase; | ||
9 | use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, Upcast}; | 8 | use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, Upcast}; |
9 | use base_db::{AnchoredPath, SourceDatabase}; | ||
10 | use hir_expand::db::AstDatabase; | 10 | use hir_expand::db::AstDatabase; |
11 | use hir_expand::diagnostics::Diagnostic; | 11 | use hir_expand::diagnostics::Diagnostic; |
12 | use hir_expand::diagnostics::DiagnosticSinkBuilder; | 12 | use hir_expand::diagnostics::DiagnosticSinkBuilder; |
@@ -63,8 +63,8 @@ impl FileLoader for TestDB { | |||
63 | fn file_text(&self, file_id: FileId) -> Arc<String> { | 63 | fn file_text(&self, file_id: FileId) -> Arc<String> { |
64 | FileLoaderDelegate(self).file_text(file_id) | 64 | FileLoaderDelegate(self).file_text(file_id) |
65 | } | 65 | } |
66 | fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | 66 | fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> { |
67 | FileLoaderDelegate(self).resolve_path(anchor, path) | 67 | FileLoaderDelegate(self).resolve_path(path) |
68 | } | 68 | } |
69 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { | 69 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { |
70 | FileLoaderDelegate(self).relevant_crates(file_id) | 70 | FileLoaderDelegate(self).relevant_crates(file_id) |
@@ -157,11 +157,12 @@ impl TestDB { | |||
157 | db.diagnostics(|d| { | 157 | db.diagnostics(|d| { |
158 | let src = d.display_source(); | 158 | let src = d.display_source(); |
159 | let root = db.parse_or_expand(src.file_id).unwrap(); | 159 | let root = db.parse_or_expand(src.file_id).unwrap(); |
160 | // FIXME: macros... | 160 | |
161 | let file_id = src.file_id.original_file(db); | 161 | let node = src.map(|ptr| ptr.to_node(&root)); |
162 | let range = src.value.to_node(&root).text_range(); | 162 | let frange = node.as_ref().original_file_range(db); |
163 | |||
163 | let message = d.message().to_owned(); | 164 | let message = d.message().to_owned(); |
164 | actual.entry(file_id).or_default().push((range, message)); | 165 | actual.entry(frange.file_id).or_default().push((frange.range, message)); |
165 | }); | 166 | }); |
166 | 167 | ||
167 | for (file_id, diags) in actual.iter_mut() { | 168 | for (file_id, diags) in actual.iter_mut() { |
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs index 44a5556b6..477192a09 100644 --- a/crates/hir_expand/src/builtin_macro.rs +++ b/crates/hir_expand/src/builtin_macro.rs | |||
@@ -4,7 +4,7 @@ use crate::{ | |||
4 | MacroDefId, MacroDefKind, TextSize, | 4 | MacroDefId, MacroDefKind, TextSize, |
5 | }; | 5 | }; |
6 | 6 | ||
7 | use base_db::FileId; | 7 | use base_db::{AnchoredPath, FileId}; |
8 | use either::Either; | 8 | use either::Either; |
9 | use mbe::{parse_to_token_tree, ExpandResult}; | 9 | use mbe::{parse_to_token_tree, ExpandResult}; |
10 | use parser::FragmentKind; | 10 | use parser::FragmentKind; |
@@ -245,6 +245,12 @@ fn format_args_expand( | |||
245 | if args.is_empty() { | 245 | if args.is_empty() { |
246 | return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule); | 246 | return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule); |
247 | } | 247 | } |
248 | for arg in &mut args { | ||
249 | // Remove `key =`. | ||
250 | if matches!(arg.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=') { | ||
251 | arg.drain(..2); | ||
252 | } | ||
253 | } | ||
248 | let _format_string = args.remove(0); | 254 | let _format_string = args.remove(0); |
249 | let arg_tts = args.into_iter().flat_map(|arg| { | 255 | let arg_tts = args.into_iter().flat_map(|arg| { |
250 | quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), } | 256 | quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), } |
@@ -287,23 +293,34 @@ fn concat_expand( | |||
287 | _arg_id: EagerMacroId, | 293 | _arg_id: EagerMacroId, |
288 | tt: &tt::Subtree, | 294 | tt: &tt::Subtree, |
289 | ) -> ExpandResult<Option<(tt::Subtree, FragmentKind)>> { | 295 | ) -> ExpandResult<Option<(tt::Subtree, FragmentKind)>> { |
296 | let mut err = None; | ||
290 | let mut text = String::new(); | 297 | let mut text = String::new(); |
291 | for (i, t) in tt.token_trees.iter().enumerate() { | 298 | for (i, t) in tt.token_trees.iter().enumerate() { |
292 | match t { | 299 | match t { |
293 | tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { | 300 | tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => { |
294 | text += &match unquote_str(&it) { | 301 | // concat works with string and char literals, so remove any quotes. |
295 | Some(s) => s, | 302 | // It also works with integer, float and boolean literals, so just use the rest |
296 | None => { | 303 | // as-is. |
297 | return ExpandResult::only_err(mbe::ExpandError::ConversionError); | 304 | |
298 | } | 305 | text += it |
299 | }; | 306 | .text |
307 | .trim_start_matches(|c| match c { | ||
308 | 'r' | '#' | '\'' | '"' => true, | ||
309 | _ => false, | ||
310 | }) | ||
311 | .trim_end_matches(|c| match c { | ||
312 | '#' | '\'' | '"' => true, | ||
313 | _ => false, | ||
314 | }); | ||
300 | } | 315 | } |
301 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), | 316 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), |
302 | _ => return ExpandResult::only_err(mbe::ExpandError::UnexpectedToken), | 317 | _ => { |
318 | err.get_or_insert(mbe::ExpandError::UnexpectedToken); | ||
319 | } | ||
303 | } | 320 | } |
304 | } | 321 | } |
305 | 322 | ||
306 | ExpandResult::ok(Some((quote!(#text), FragmentKind::Expr))) | 323 | ExpandResult { value: Some((quote!(#text), FragmentKind::Expr)), err } |
307 | } | 324 | } |
308 | 325 | ||
309 | fn relative_file( | 326 | fn relative_file( |
@@ -313,7 +330,8 @@ fn relative_file( | |||
313 | allow_recursion: bool, | 330 | allow_recursion: bool, |
314 | ) -> Option<FileId> { | 331 | ) -> Option<FileId> { |
315 | let call_site = call_id.as_file().original_file(db); | 332 | let call_site = call_id.as_file().original_file(db); |
316 | let res = db.resolve_path(call_site, path)?; | 333 | let path = AnchoredPath { anchor: call_site, path }; |
334 | let res = db.resolve_path(path)?; | ||
317 | // Prevent include itself | 335 | // Prevent include itself |
318 | if res == call_site && !allow_recursion { | 336 | if res == call_site && !allow_recursion { |
319 | None | 337 | None |
@@ -686,4 +704,19 @@ mod tests { | |||
686 | 704 | ||
687 | assert_eq!(expanded, r#"b"""#); | 705 | assert_eq!(expanded, r#"b"""#); |
688 | } | 706 | } |
707 | |||
708 | #[test] | ||
709 | fn test_concat_expand() { | ||
710 | let expanded = expand_builtin_macro( | ||
711 | r##" | ||
712 | #[rustc_builtin_macro] | ||
713 | macro_rules! concat {} | ||
714 | concat!("foo", 0, r#"bar"#); | ||
715 | "##, | ||
716 | ); | ||
717 | |||
718 | assert_eq!(expanded, r#""foo0bar""#); | ||
719 | |||
720 | // FIXME: `true`/`false` literals don't work. | ||
721 | } | ||
689 | } | 722 | } |
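The rewritten `concat_expand` above no longer bails out through `unquote_str`; it strips literal delimiters with two `trim` passes, so string, raw-string and char literals lose their quotes while integer and float literals pass through unchanged. A standalone sketch of that trimming (the helper name is hypothetical, not part of the patch):

    fn strip_literal_delimiters(text: &str) -> &str {
        // Leading `r`, `#`, `'`, `"` cover raw strings, chars and plain strings;
        // trailing `#`, `'`, `"` close them again.
        text.trim_start_matches(|c| matches!(c, 'r' | '#' | '\'' | '"'))
            .trim_end_matches(|c| matches!(c, '#' | '\'' | '"'))
    }

    fn main() {
        // Mirrors the `test_concat_expand` expectation: "foo", 0, r#"bar"# => "foo0bar".
        assert_eq!(strip_literal_delimiters(r#""foo""#), "foo");
        assert_eq!(strip_literal_delimiters("0"), "0");
        assert_eq!(strip_literal_delimiters("r#\"bar\"#"), "bar");
    }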
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 2633fd8f7..1a9428514 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -20,11 +20,11 @@ pub use mbe::{ExpandError, ExpandResult}; | |||
20 | use std::hash::Hash; | 20 | use std::hash::Hash; |
21 | use std::sync::Arc; | 21 | use std::sync::Arc; |
22 | 22 | ||
23 | use base_db::{impl_intern_key, salsa, CrateId, FileId}; | 23 | use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange}; |
24 | use syntax::{ | 24 | use syntax::{ |
25 | algo, | 25 | algo::{self, skip_trivia_token}, |
26 | ast::{self, AstNode}, | 26 | ast::{self, AstNode}, |
27 | SyntaxNode, SyntaxToken, TextSize, | 27 | Direction, SyntaxNode, SyntaxToken, TextRange, TextSize, |
28 | }; | 28 | }; |
29 | 29 | ||
30 | use crate::ast_id_map::FileAstId; | 30 | use crate::ast_id_map::FileAstId; |
@@ -445,6 +445,70 @@ impl InFile<SyntaxNode> { | |||
445 | } | 445 | } |
446 | } | 446 | } |
447 | 447 | ||
448 | impl<'a> InFile<&'a SyntaxNode> { | ||
449 | pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { | ||
450 | if let Some(range) = original_range_opt(db, self) { | ||
451 | let original_file = range.file_id.original_file(db); | ||
452 | if range.file_id == original_file.into() { | ||
453 | return FileRange { file_id: original_file, range: range.value }; | ||
454 | } | ||
455 | |||
456 | log::error!("Fail to mapping up more for {:?}", range); | ||
457 | return FileRange { file_id: range.file_id.original_file(db), range: range.value }; | ||
458 | } | ||
459 | |||
460 | // Fall back to whole macro call. | ||
461 | let mut node = self.cloned(); | ||
462 | while let Some(call_node) = node.file_id.call_node(db) { | ||
463 | node = call_node; | ||
464 | } | ||
465 | |||
466 | let orig_file = node.file_id.original_file(db); | ||
467 | assert_eq!(node.file_id, orig_file.into()); | ||
468 | FileRange { file_id: orig_file, range: node.value.text_range() } | ||
469 | } | ||
470 | } | ||
471 | |||
472 | fn original_range_opt( | ||
473 | db: &dyn db::AstDatabase, | ||
474 | node: InFile<&SyntaxNode>, | ||
475 | ) -> Option<InFile<TextRange>> { | ||
476 | let expansion = node.file_id.expansion_info(db)?; | ||
477 | |||
478 | // Does the input node have only one token? | ||
479 | let single = skip_trivia_token(node.value.first_token()?, Direction::Next)? | ||
480 | == skip_trivia_token(node.value.last_token()?, Direction::Prev)?; | ||
481 | |||
482 | Some(node.value.descendants().find_map(|it| { | ||
483 | let first = skip_trivia_token(it.first_token()?, Direction::Next)?; | ||
484 | let first = ascend_call_token(db, &expansion, node.with_value(first))?; | ||
485 | |||
486 | let last = skip_trivia_token(it.last_token()?, Direction::Prev)?; | ||
487 | let last = ascend_call_token(db, &expansion, node.with_value(last))?; | ||
488 | |||
489 | if (!single && first == last) || (first.file_id != last.file_id) { | ||
490 | return None; | ||
491 | } | ||
492 | |||
493 | Some(first.with_value(first.value.text_range().cover(last.value.text_range()))) | ||
494 | })?) | ||
495 | } | ||
496 | |||
497 | fn ascend_call_token( | ||
498 | db: &dyn db::AstDatabase, | ||
499 | expansion: &ExpansionInfo, | ||
500 | token: InFile<SyntaxToken>, | ||
501 | ) -> Option<InFile<SyntaxToken>> { | ||
502 | let (mapped, origin) = expansion.map_token_up(token.as_ref())?; | ||
503 | if origin != Origin::Call { | ||
504 | return None; | ||
505 | } | ||
506 | if let Some(info) = mapped.file_id.expansion_info(db) { | ||
507 | return ascend_call_token(db, &info, mapped); | ||
508 | } | ||
509 | Some(mapped) | ||
510 | } | ||
511 | |||
448 | impl InFile<SyntaxToken> { | 512 | impl InFile<SyntaxToken> { |
449 | pub fn ancestors_with_macros( | 513 | pub fn ancestors_with_macros( |
450 | self, | 514 | self, |
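`original_file_range` packages the old free-standing `original_range` helper as a method on `InFile<&SyntaxNode>`: it first tries to map the node's tokens up through the macro call sites, and falls back to the range of the outermost macro call if that fails. The call pattern, mirroring the `TestDB::diagnostics` hunk earlier (where `src` is an `InFile<SyntaxNodePtr>`):

    let root = db.parse_or_expand(src.file_id).unwrap();
    let node = src.map(|ptr| ptr.to_node(&root));
    let frange = node.as_ref().original_file_range(db);
    // `frange.file_id` and `frange.range` now point into real source text,
    // even when the node itself came from a macro expansion.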
diff --git a/crates/hir_expand/src/test_db.rs b/crates/hir_expand/src/test_db.rs index fca501e1f..7168a9462 100644 --- a/crates/hir_expand/src/test_db.rs +++ b/crates/hir_expand/src/test_db.rs | |||
@@ -5,7 +5,7 @@ use std::{ | |||
5 | sync::{Arc, Mutex}, | 5 | sync::{Arc, Mutex}, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate}; | 8 | use base_db::{salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate}; |
9 | use rustc_hash::FxHashSet; | 9 | use rustc_hash::FxHashSet; |
10 | 10 | ||
11 | #[salsa::database( | 11 | #[salsa::database( |
@@ -40,8 +40,8 @@ impl FileLoader for TestDB { | |||
40 | fn file_text(&self, file_id: FileId) -> Arc<String> { | 40 | fn file_text(&self, file_id: FileId) -> Arc<String> { |
41 | FileLoaderDelegate(self).file_text(file_id) | 41 | FileLoaderDelegate(self).file_text(file_id) |
42 | } | 42 | } |
43 | fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | 43 | fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> { |
44 | FileLoaderDelegate(self).resolve_path(anchor, path) | 44 | FileLoaderDelegate(self).resolve_path(path) |
45 | } | 45 | } |
46 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { | 46 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { |
47 | FileLoaderDelegate(self).relevant_crates(file_id) | 47 | FileLoaderDelegate(self).relevant_crates(file_id) |
diff --git a/crates/hir_ty/src/diagnostics/decl_check.rs b/crates/hir_ty/src/diagnostics/decl_check.rs index 4b3e2fa8f..25587e116 100644 --- a/crates/hir_ty/src/diagnostics/decl_check.rs +++ b/crates/hir_ty/src/diagnostics/decl_check.rs | |||
@@ -26,6 +26,7 @@ use syntax::{ | |||
26 | ast::{self, NameOwner}, | 26 | ast::{self, NameOwner}, |
27 | AstNode, AstPtr, | 27 | AstNode, AstPtr, |
28 | }; | 28 | }; |
29 | use test_utils::mark; | ||
29 | 30 | ||
30 | use crate::{ | 31 | use crate::{ |
31 | db::HirDatabase, | 32 | db::HirDatabase, |
@@ -87,6 +88,11 @@ impl<'a, 'b> DeclValidator<'a, 'b> { | |||
87 | 88 | ||
88 | fn validate_func(&mut self, db: &dyn HirDatabase, func: FunctionId) { | 89 | fn validate_func(&mut self, db: &dyn HirDatabase, func: FunctionId) { |
89 | let data = db.function_data(func); | 90 | let data = db.function_data(func); |
91 | if data.is_extern { | ||
92 | mark::hit!(extern_func_incorrect_case_ignored); | ||
93 | return; | ||
94 | } | ||
95 | |||
90 | let body = db.body(func.into()); | 96 | let body = db.body(func.into()); |
91 | 97 | ||
92 | // Recursively validate inner scope items, such as static variables and constants. | 98 | // Recursively validate inner scope items, such as static variables and constants. |
@@ -648,6 +654,10 @@ impl<'a, 'b> DeclValidator<'a, 'b> { | |||
648 | 654 | ||
649 | fn validate_static(&mut self, db: &dyn HirDatabase, static_id: StaticId) { | 655 | fn validate_static(&mut self, db: &dyn HirDatabase, static_id: StaticId) { |
650 | let data = db.static_data(static_id); | 656 | let data = db.static_data(static_id); |
657 | if data.is_extern { | ||
658 | mark::hit!(extern_static_incorrect_case_ignored); | ||
659 | return; | ||
660 | } | ||
651 | 661 | ||
652 | if self.allowed(db, static_id.into(), allow::NON_UPPER_CASE_GLOBAL) { | 662 | if self.allowed(db, static_id.into(), allow::NON_UPPER_CASE_GLOBAL) { |
653 | return; | 663 | return; |
@@ -709,6 +719,8 @@ fn pat_equals_to_name(pat: Option<ast::Pat>, name: &Name) -> bool { | |||
709 | 719 | ||
710 | #[cfg(test)] | 720 | #[cfg(test)] |
711 | mod tests { | 721 | mod tests { |
722 | use test_utils::mark; | ||
723 | |||
712 | use crate::diagnostics::tests::check_diagnostics; | 724 | use crate::diagnostics::tests::check_diagnostics; |
713 | 725 | ||
714 | #[test] | 726 | #[test] |
@@ -920,4 +932,18 @@ fn main() { | |||
920 | "#, | 932 | "#, |
921 | ); | 933 | ); |
922 | } | 934 | } |
935 | |||
936 | #[test] | ||
937 | fn ignores_extern_items() { | ||
938 | mark::check!(extern_func_incorrect_case_ignored); | ||
939 | mark::check!(extern_static_incorrect_case_ignored); | ||
940 | check_diagnostics( | ||
941 | r#" | ||
942 | extern { | ||
943 | fn NonSnakeCaseName(SOME_VAR: u8) -> u8; | ||
944 | pub static SomeStatic: u8 = 10; | ||
945 | } | ||
946 | "#, | ||
947 | ); | ||
948 | } | ||
923 | } | 949 | } |
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs index 14e8c0633..e77481906 100644 --- a/crates/hir_ty/src/display.rs +++ b/crates/hir_ty/src/display.rs | |||
@@ -297,26 +297,7 @@ impl HirDisplay for ApplicationTy { | |||
297 | } | 297 | } |
298 | TypeCtor::FnPtr { is_varargs, .. } => { | 298 | TypeCtor::FnPtr { is_varargs, .. } => { |
299 | let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs); | 299 | let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs); |
300 | write!(f, "fn(")?; | 300 | sig.hir_fmt(f)?; |
301 | f.write_joined(sig.params(), ", ")?; | ||
302 | if is_varargs { | ||
303 | if sig.params().is_empty() { | ||
304 | write!(f, "...")?; | ||
305 | } else { | ||
306 | write!(f, ", ...")?; | ||
307 | } | ||
308 | } | ||
309 | write!(f, ")")?; | ||
310 | let ret = sig.ret(); | ||
311 | if *ret != Ty::unit() { | ||
312 | let ret_display = ret.into_displayable( | ||
313 | f.db, | ||
314 | f.max_size, | ||
315 | f.omit_verbose_types, | ||
316 | f.display_target, | ||
317 | ); | ||
318 | write!(f, " -> {}", ret_display)?; | ||
319 | } | ||
320 | } | 301 | } |
321 | TypeCtor::FnDef(def) => { | 302 | TypeCtor::FnDef(def) => { |
322 | let sig = f.db.callable_item_signature(def).subst(&self.parameters); | 303 | let sig = f.db.callable_item_signature(def).subst(&self.parameters); |
@@ -584,6 +565,28 @@ impl HirDisplay for Ty { | |||
584 | } | 565 | } |
585 | } | 566 | } |
586 | 567 | ||
568 | impl HirDisplay for FnSig { | ||
569 | fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> { | ||
570 | write!(f, "fn(")?; | ||
571 | f.write_joined(self.params(), ", ")?; | ||
572 | if self.is_varargs { | ||
573 | if self.params().is_empty() { | ||
574 | write!(f, "...")?; | ||
575 | } else { | ||
576 | write!(f, ", ...")?; | ||
577 | } | ||
578 | } | ||
579 | write!(f, ")")?; | ||
580 | let ret = self.ret(); | ||
581 | if *ret != Ty::unit() { | ||
582 | let ret_display = | ||
583 | ret.into_displayable(f.db, f.max_size, f.omit_verbose_types, f.display_target); | ||
584 | write!(f, " -> {}", ret_display)?; | ||
585 | } | ||
586 | Ok(()) | ||
587 | } | ||
588 | } | ||
589 | |||
587 | fn write_bounds_like_dyn_trait( | 590 | fn write_bounds_like_dyn_trait( |
588 | predicates: &[GenericPredicate], | 591 | predicates: &[GenericPredicate], |
589 | f: &mut HirFormatter, | 592 | f: &mut HirFormatter, |
diff --git a/crates/hir_ty/src/test_db.rs b/crates/hir_ty/src/test_db.rs index 22254b765..646e16bbe 100644 --- a/crates/hir_ty/src/test_db.rs +++ b/crates/hir_ty/src/test_db.rs | |||
@@ -5,7 +5,9 @@ use std::{ | |||
5 | sync::{Arc, Mutex}, | 5 | sync::{Arc, Mutex}, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast}; | 8 | use base_db::{ |
9 | salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, | ||
10 | }; | ||
9 | use hir_def::{db::DefDatabase, ModuleId}; | 11 | use hir_def::{db::DefDatabase, ModuleId}; |
10 | use hir_expand::db::AstDatabase; | 12 | use hir_expand::db::AstDatabase; |
11 | use rustc_hash::{FxHashMap, FxHashSet}; | 13 | use rustc_hash::{FxHashMap, FxHashSet}; |
@@ -67,8 +69,8 @@ impl FileLoader for TestDB { | |||
67 | fn file_text(&self, file_id: FileId) -> Arc<String> { | 69 | fn file_text(&self, file_id: FileId) -> Arc<String> { |
68 | FileLoaderDelegate(self).file_text(file_id) | 70 | FileLoaderDelegate(self).file_text(file_id) |
69 | } | 71 | } |
70 | fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | 72 | fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> { |
71 | FileLoaderDelegate(self).resolve_path(anchor, path) | 73 | FileLoaderDelegate(self).resolve_path(path) |
72 | } | 74 | } |
73 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { | 75 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { |
74 | FileLoaderDelegate(self).relevant_crates(file_id) | 76 | FileLoaderDelegate(self).relevant_crates(file_id) |
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs index 9157704dc..049f808dc 100644 --- a/crates/ide/src/diagnostics.rs +++ b/crates/ide/src/diagnostics.rs | |||
@@ -613,10 +613,12 @@ fn test_fn() { | |||
613 | source_file_edits: [], | 613 | source_file_edits: [], |
614 | file_system_edits: [ | 614 | file_system_edits: [ |
615 | CreateFile { | 615 | CreateFile { |
616 | anchor: FileId( | 616 | dst: AnchoredPathBuf { |
617 | 0, | 617 | anchor: FileId( |
618 | ), | 618 | 0, |
619 | dst: "foo.rs", | 619 | ), |
620 | path: "foo.rs", | ||
621 | }, | ||
620 | }, | 622 | }, |
621 | ], | 623 | ], |
622 | is_snippet: false, | 624 | is_snippet: false, |
diff --git a/crates/ide/src/diagnostics/fixes.rs b/crates/ide/src/diagnostics/fixes.rs index ba046232a..13240672f 100644 --- a/crates/ide/src/diagnostics/fixes.rs +++ b/crates/ide/src/diagnostics/fixes.rs | |||
@@ -6,9 +6,9 @@ use hir::{ | |||
6 | Diagnostic, IncorrectCase, MissingFields, MissingOkInTailExpr, NoSuchField, | 6 | Diagnostic, IncorrectCase, MissingFields, MissingOkInTailExpr, NoSuchField, |
7 | RemoveThisSemicolon, UnresolvedModule, | 7 | RemoveThisSemicolon, UnresolvedModule, |
8 | }, | 8 | }, |
9 | HasSource, HirDisplay, Semantics, VariantDef, | 9 | HasSource, HirDisplay, InFile, Semantics, VariantDef, |
10 | }; | 10 | }; |
11 | use ide_db::base_db::FileId; | 11 | use ide_db::base_db::{AnchoredPathBuf, FileId}; |
12 | use ide_db::{ | 12 | use ide_db::{ |
13 | source_change::{FileSystemEdit, SourceFileEdit}, | 13 | source_change::{FileSystemEdit, SourceFileEdit}, |
14 | RootDatabase, | 14 | RootDatabase, |
@@ -36,8 +36,10 @@ impl DiagnosticWithFix for UnresolvedModule { | |||
36 | Some(Fix::new( | 36 | Some(Fix::new( |
37 | "Create module", | 37 | "Create module", |
38 | FileSystemEdit::CreateFile { | 38 | FileSystemEdit::CreateFile { |
39 | anchor: self.file.original_file(sema.db), | 39 | dst: AnchoredPathBuf { |
40 | dst: self.candidate.clone(), | 40 | anchor: self.file.original_file(sema.db), |
41 | path: self.candidate.clone(), | ||
42 | }, | ||
41 | } | 43 | } |
42 | .into(), | 44 | .into(), |
43 | unresolved_module.syntax().text_range(), | 45 | unresolved_module.syntax().text_range(), |
@@ -68,7 +70,8 @@ impl DiagnosticWithFix for MissingFields { | |||
68 | } | 70 | } |
69 | 71 | ||
70 | let root = sema.db.parse_or_expand(self.file)?; | 72 | let root = sema.db.parse_or_expand(self.file)?; |
71 | let old_field_list = self.field_list_parent.to_node(&root).record_expr_field_list()?; | 73 | let field_list_parent = self.field_list_parent.to_node(&root); |
74 | let old_field_list = field_list_parent.record_expr_field_list()?; | ||
72 | let mut new_field_list = old_field_list.clone(); | 75 | let mut new_field_list = old_field_list.clone(); |
73 | for f in self.missed_fields.iter() { | 76 | for f in self.missed_fields.iter() { |
74 | let field = | 77 | let field = |
@@ -85,7 +88,7 @@ impl DiagnosticWithFix for MissingFields { | |||
85 | Some(Fix::new( | 88 | Some(Fix::new( |
86 | "Fill struct fields", | 89 | "Fill struct fields", |
87 | SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(), | 90 | SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(), |
88 | sema.original_range(&old_field_list.syntax()).range, | 91 | sema.original_range(&field_list_parent.syntax()).range, |
89 | )) | 92 | )) |
90 | } | 93 | } |
91 | } | 94 | } |
@@ -128,9 +131,9 @@ impl DiagnosticWithFix for IncorrectCase { | |||
128 | let root = sema.db.parse_or_expand(self.file)?; | 131 | let root = sema.db.parse_or_expand(self.file)?; |
129 | let name_node = self.ident.to_node(&root); | 132 | let name_node = self.ident.to_node(&root); |
130 | 133 | ||
131 | let file_id = self.file.original_file(sema.db); | 134 | let name_node = InFile::new(self.file, name_node.syntax()); |
132 | let offset = name_node.syntax().text_range().start(); | 135 | let frange = name_node.original_file_range(sema.db); |
133 | let file_position = FilePosition { file_id, offset }; | 136 | let file_position = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; |
134 | 137 | ||
135 | let rename_changes = | 138 | let rename_changes = |
136 | rename_with_semantics(sema, file_position, &self.suggested_text).ok()?; | 139 | rename_with_semantics(sema, file_position, &self.suggested_text).ok()?; |
diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs index 0c429a262..4790d648a 100644 --- a/crates/ide/src/display/navigation_target.rs +++ b/crates/ide/src/display/navigation_target.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use either::Either; | 3 | use either::Either; |
4 | use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource}; | 4 | use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; |
5 | use ide_db::base_db::{FileId, SourceDatabase}; | 5 | use ide_db::base_db::{FileId, SourceDatabase}; |
6 | use ide_db::{defs::Definition, RootDatabase}; | 6 | use ide_db::{defs::Definition, RootDatabase}; |
7 | use syntax::{ | 7 | use syntax::{ |
@@ -62,7 +62,8 @@ impl NavigationTarget { | |||
62 | pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { | 62 | pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { |
63 | let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); | 63 | let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default(); |
64 | if let Some(src) = module.declaration_source(db) { | 64 | if let Some(src) = module.declaration_source(db) { |
65 | let frange = original_range(db, src.as_ref().map(|it| it.syntax())); | 65 | let node = src.as_ref().map(|it| it.syntax()); |
66 | let frange = node.original_file_range(db); | ||
66 | let mut res = NavigationTarget::from_syntax( | 67 | let mut res = NavigationTarget::from_syntax( |
67 | frange.file_id, | 68 | frange.file_id, |
68 | name, | 69 | name, |
@@ -104,8 +105,8 @@ impl NavigationTarget { | |||
104 | let name = | 105 | let name = |
105 | node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); | 106 | node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); |
106 | let focus_range = | 107 | let focus_range = |
107 | node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); | 108 | node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range); |
108 | let frange = original_range(db, node.map(|it| it.syntax())); | 109 | let frange = node.map(|it| it.syntax()).original_file_range(db); |
109 | 110 | ||
110 | NavigationTarget::from_syntax( | 111 | NavigationTarget::from_syntax( |
111 | frange.file_id, | 112 | frange.file_id, |
@@ -124,7 +125,7 @@ impl NavigationTarget { | |||
124 | ) -> NavigationTarget { | 125 | ) -> NavigationTarget { |
125 | let name = | 126 | let name = |
126 | named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); | 127 | named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); |
127 | let frange = original_range(db, node.map(|it| it.syntax())); | 128 | let frange = node.map(|it| it.syntax()).original_file_range(db); |
128 | 129 | ||
129 | NavigationTarget::from_syntax( | 130 | NavigationTarget::from_syntax( |
130 | frange.file_id, | 131 | frange.file_id, |
@@ -236,7 +237,7 @@ impl ToNav for hir::Module { | |||
236 | (node.syntax(), node.name().map(|it| it.syntax().text_range())) | 237 | (node.syntax(), node.name().map(|it| it.syntax().text_range())) |
237 | } | 238 | } |
238 | }; | 239 | }; |
239 | let frange = original_range(db, src.with_value(syntax)); | 240 | let frange = src.with_value(syntax).original_file_range(db); |
240 | NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind()) | 241 | NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind()) |
241 | } | 242 | } |
242 | } | 243 | } |
@@ -246,14 +247,14 @@ impl ToNav for hir::ImplDef { | |||
246 | let src = self.source(db); | 247 | let src = self.source(db); |
247 | let derive_attr = self.is_builtin_derive(db); | 248 | let derive_attr = self.is_builtin_derive(db); |
248 | let frange = if let Some(item) = &derive_attr { | 249 | let frange = if let Some(item) = &derive_attr { |
249 | original_range(db, item.syntax()) | 250 | item.syntax().original_file_range(db) |
250 | } else { | 251 | } else { |
251 | original_range(db, src.as_ref().map(|it| it.syntax())) | 252 | src.as_ref().map(|it| it.syntax()).original_file_range(db) |
252 | }; | 253 | }; |
253 | let focus_range = if derive_attr.is_some() { | 254 | let focus_range = if derive_attr.is_some() { |
254 | None | 255 | None |
255 | } else { | 256 | } else { |
256 | src.value.self_ty().map(|ty| original_range(db, src.with_value(ty.syntax())).range) | 257 | src.value.self_ty().map(|ty| src.with_value(ty.syntax()).original_file_range(db).range) |
257 | }; | 258 | }; |
258 | 259 | ||
259 | NavigationTarget::from_syntax( | 260 | NavigationTarget::from_syntax( |
@@ -278,7 +279,7 @@ impl ToNav for hir::Field { | |||
278 | res | 279 | res |
279 | } | 280 | } |
280 | FieldSource::Pos(it) => { | 281 | FieldSource::Pos(it) => { |
281 | let frange = original_range(db, src.with_value(it.syntax())); | 282 | let frange = src.with_value(it.syntax()).original_file_range(db); |
282 | NavigationTarget::from_syntax( | 283 | NavigationTarget::from_syntax( |
283 | frange.file_id, | 284 | frange.file_id, |
284 | "".into(), | 285 | "".into(), |
@@ -331,7 +332,7 @@ impl ToNav for hir::Local { | |||
331 | } | 332 | } |
332 | Either::Right(it) => it.syntax().clone(), | 333 | Either::Right(it) => it.syntax().clone(), |
333 | }; | 334 | }; |
334 | let full_range = original_range(db, src.with_value(&node)); | 335 | let full_range = src.with_value(&node).original_file_range(db); |
335 | let name = match self.name(db) { | 336 | let name = match self.name(db) { |
336 | Some(it) => it.to_string().into(), | 337 | Some(it) => it.to_string().into(), |
337 | None => "".into(), | 338 | None => "".into(), |
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 1b6ff6d21..cf04c3de0 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs | |||
@@ -3357,4 +3357,66 @@ impl Foo { | |||
3357 | "#]], | 3357 | "#]], |
3358 | ); | 3358 | ); |
3359 | } | 3359 | } |
3360 | |||
3361 | #[test] | ||
3362 | fn hover_doc_outer_inner() { | ||
3363 | check( | ||
3364 | r#" | ||
3365 | /// Be quick; | ||
3366 | mod Foo<|> { | ||
3367 | //! time is mana | ||
3368 | |||
3369 | /// This comment belongs to the function | ||
3370 | fn foo() {} | ||
3371 | } | ||
3372 | "#, | ||
3373 | expect![[r#" | ||
3374 | *Foo* | ||
3375 | |||
3376 | ```rust | ||
3377 | test | ||
3378 | ``` | ||
3379 | |||
3380 | ```rust | ||
3381 | mod Foo | ||
3382 | ``` | ||
3383 | |||
3384 | --- | ||
3385 | |||
3386 | Be quick; | ||
3387 | time is mana | ||
3388 | "#]], | ||
3389 | ); | ||
3390 | } | ||
3391 | |||
3392 | #[test] | ||
3393 | fn hover_doc_outer_inner_attribute() { | ||
3394 | check( | ||
3395 | r#" | ||
3396 | #[doc = "Be quick;"] | ||
3397 | mod Foo<|> { | ||
3398 | #![doc = "time is mana"] | ||
3399 | |||
3400 | #[doc = "This comment belongs to the function"] | ||
3401 | fn foo() {} | ||
3402 | } | ||
3403 | "#, | ||
3404 | expect![[r#" | ||
3405 | *Foo* | ||
3406 | |||
3407 | ```rust | ||
3408 | test | ||
3409 | ``` | ||
3410 | |||
3411 | ```rust | ||
3412 | mod Foo | ||
3413 | ``` | ||
3414 | |||
3415 | --- | ||
3416 | |||
3417 | Be quick; | ||
3418 | time is mana | ||
3419 | "#]], | ||
3420 | ); | ||
3421 | } | ||
3360 | } | 3422 | } |
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs index 64fe8bd65..44081f210 100644 --- a/crates/ide/src/references/rename.rs +++ b/crates/ide/src/references/rename.rs | |||
@@ -6,7 +6,7 @@ use std::{ | |||
6 | }; | 6 | }; |
7 | 7 | ||
8 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; | 8 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; |
9 | use ide_db::base_db::{FileRange, SourceDatabaseExt}; | 9 | use ide_db::base_db::{AnchoredPathBuf, FileRange, SourceDatabaseExt}; |
10 | use ide_db::{ | 10 | use ide_db::{ |
11 | defs::{Definition, NameClass, NameRefClass}, | 11 | defs::{Definition, NameClass, NameRefClass}, |
12 | RootDatabase, | 12 | RootDatabase, |
@@ -182,12 +182,13 @@ fn rename_mod( | |||
182 | match src.value { | 182 | match src.value { |
183 | ModuleSource::SourceFile(..) => { | 183 | ModuleSource::SourceFile(..) => { |
184 | // mod is defined in path/to/dir/mod.rs | 184 | // mod is defined in path/to/dir/mod.rs |
185 | let dst = if module.is_mod_rs(sema.db) { | 185 | let path = if module.is_mod_rs(sema.db) { |
186 | format!("../{}/mod.rs", new_name) | 186 | format!("../{}/mod.rs", new_name) |
187 | } else { | 187 | } else { |
188 | format!("{}.rs", new_name) | 188 | format!("{}.rs", new_name) |
189 | }; | 189 | }; |
190 | let move_file = FileSystemEdit::MoveFile { src: file_id, anchor: file_id, dst }; | 190 | let dst = AnchoredPathBuf { anchor: file_id, path }; |
191 | let move_file = FileSystemEdit::MoveFile { src: file_id, dst }; | ||
191 | file_system_edits.push(move_file); | 192 | file_system_edits.push(move_file); |
192 | } | 193 | } |
193 | ModuleSource::Module(..) => {} | 194 | ModuleSource::Module(..) => {} |
@@ -771,10 +772,12 @@ mod foo<|>; | |||
771 | src: FileId( | 772 | src: FileId( |
772 | 2, | 773 | 2, |
773 | ), | 774 | ), |
774 | anchor: FileId( | 775 | dst: AnchoredPathBuf { |
775 | 2, | 776 | anchor: FileId( |
776 | ), | 777 | 2, |
777 | dst: "foo2.rs", | 778 | ), |
779 | path: "foo2.rs", | ||
780 | }, | ||
778 | }, | 781 | }, |
779 | ], | 782 | ], |
780 | is_snippet: false, | 783 | is_snippet: false, |
@@ -837,10 +840,12 @@ use crate::foo<|>::FooContent; | |||
837 | src: FileId( | 840 | src: FileId( |
838 | 1, | 841 | 1, |
839 | ), | 842 | ), |
840 | anchor: FileId( | 843 | dst: AnchoredPathBuf { |
841 | 1, | 844 | anchor: FileId( |
842 | ), | 845 | 1, |
843 | dst: "quux.rs", | 846 | ), |
847 | path: "quux.rs", | ||
848 | }, | ||
844 | }, | 849 | }, |
845 | ], | 850 | ], |
846 | is_snippet: false, | 851 | is_snippet: false, |
@@ -884,10 +889,12 @@ mod fo<|>o; | |||
884 | src: FileId( | 889 | src: FileId( |
885 | 1, | 890 | 1, |
886 | ), | 891 | ), |
887 | anchor: FileId( | 892 | dst: AnchoredPathBuf { |
888 | 1, | 893 | anchor: FileId( |
889 | ), | 894 | 1, |
890 | dst: "../foo2/mod.rs", | 895 | ), |
896 | path: "../foo2/mod.rs", | ||
897 | }, | ||
891 | }, | 898 | }, |
892 | ], | 899 | ], |
893 | is_snippet: false, | 900 | is_snippet: false, |
@@ -932,10 +939,12 @@ mod outer { mod fo<|>o; } | |||
932 | src: FileId( | 939 | src: FileId( |
933 | 1, | 940 | 1, |
934 | ), | 941 | ), |
935 | anchor: FileId( | 942 | dst: AnchoredPathBuf { |
936 | 1, | 943 | anchor: FileId( |
937 | ), | 944 | 1, |
938 | dst: "bar.rs", | 945 | ), |
946 | path: "bar.rs", | ||
947 | }, | ||
939 | }, | 948 | }, |
940 | ], | 949 | ], |
941 | is_snippet: false, | 950 | is_snippet: false, |
@@ -1016,10 +1025,12 @@ pub mod foo<|>; | |||
1016 | src: FileId( | 1025 | src: FileId( |
1017 | 2, | 1026 | 2, |
1018 | ), | 1027 | ), |
1019 | anchor: FileId( | 1028 | dst: AnchoredPathBuf { |
1020 | 2, | 1029 | anchor: FileId( |
1021 | ), | 1030 | 2, |
1022 | dst: "foo2.rs", | 1031 | ), |
1032 | path: "foo2.rs", | ||
1033 | }, | ||
1023 | }, | 1034 | }, |
1024 | ], | 1035 | ], |
1025 | is_snippet: false, | 1036 | is_snippet: false, |
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs index 040843990..9be36d59b 100644 --- a/crates/ide_db/src/helpers/insert_use.rs +++ b/crates/ide_db/src/helpers/insert_use.rs | |||
@@ -93,7 +93,7 @@ fn is_inner_comment(token: SyntaxToken) -> bool { | |||
93 | pub fn insert_use<'a>( | 93 | pub fn insert_use<'a>( |
94 | scope: &ImportScope, | 94 | scope: &ImportScope, |
95 | path: ast::Path, | 95 | path: ast::Path, |
96 | merge: Option<MergeBehaviour>, | 96 | merge: Option<MergeBehavior>, |
97 | ) -> SyntaxRewriter<'a> { | 97 | ) -> SyntaxRewriter<'a> { |
98 | let _p = profile::span("insert_use"); | 98 | let _p = profile::span("insert_use"); |
99 | let mut rewriter = SyntaxRewriter::default(); | 99 | let mut rewriter = SyntaxRewriter::default(); |
@@ -183,7 +183,7 @@ fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) - | |||
183 | pub fn try_merge_imports( | 183 | pub fn try_merge_imports( |
184 | lhs: &ast::Use, | 184 | lhs: &ast::Use, |
185 | rhs: &ast::Use, | 185 | rhs: &ast::Use, |
186 | merge_behaviour: MergeBehaviour, | 186 | merge_behavior: MergeBehavior, |
187 | ) -> Option<ast::Use> { | 187 | ) -> Option<ast::Use> { |
188 | // don't merge imports with different visibilities | 188 | // don't merge imports with different visibilities |
189 | if !eq_visibility(lhs.visibility(), rhs.visibility()) { | 189 | if !eq_visibility(lhs.visibility(), rhs.visibility()) { |
@@ -191,14 +191,14 @@ pub fn try_merge_imports( | |||
191 | } | 191 | } |
192 | let lhs_tree = lhs.use_tree()?; | 192 | let lhs_tree = lhs.use_tree()?; |
193 | let rhs_tree = rhs.use_tree()?; | 193 | let rhs_tree = rhs.use_tree()?; |
194 | let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behaviour)?; | 194 | let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?; |
195 | Some(lhs.with_use_tree(merged)) | 195 | Some(lhs.with_use_tree(merged)) |
196 | } | 196 | } |
197 | 197 | ||
198 | pub fn try_merge_trees( | 198 | pub fn try_merge_trees( |
199 | lhs: &ast::UseTree, | 199 | lhs: &ast::UseTree, |
200 | rhs: &ast::UseTree, | 200 | rhs: &ast::UseTree, |
201 | merge: MergeBehaviour, | 201 | merge: MergeBehavior, |
202 | ) -> Option<ast::UseTree> { | 202 | ) -> Option<ast::UseTree> { |
203 | let lhs_path = lhs.path()?; | 203 | let lhs_path = lhs.path()?; |
204 | let rhs_path = rhs.path()?; | 204 | let rhs_path = rhs.path()?; |
@@ -220,7 +220,7 @@ pub fn try_merge_trees( | |||
220 | fn recursive_merge( | 220 | fn recursive_merge( |
221 | lhs: &ast::UseTree, | 221 | lhs: &ast::UseTree, |
222 | rhs: &ast::UseTree, | 222 | rhs: &ast::UseTree, |
223 | merge: MergeBehaviour, | 223 | merge: MergeBehavior, |
224 | ) -> Option<ast::UseTree> { | 224 | ) -> Option<ast::UseTree> { |
225 | let mut use_trees = lhs | 225 | let mut use_trees = lhs |
226 | .use_tree_list() | 226 | .use_tree_list() |
@@ -301,7 +301,7 @@ fn recursive_merge( | |||
301 | } | 301 | } |
302 | } | 302 | } |
303 | Err(_) | 303 | Err(_) |
304 | if merge == MergeBehaviour::Last | 304 | if merge == MergeBehavior::Last |
305 | && use_trees.len() > 0 | 305 | && use_trees.len() > 0 |
306 | && rhs_t.use_tree_list().is_some() => | 306 | && rhs_t.use_tree_list().is_some() => |
307 | { | 307 | { |
@@ -438,20 +438,20 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering { | |||
438 | 438 | ||
439 | /// What type of merges are allowed. | 439 | /// What type of merges are allowed. |
440 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] | 440 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] |
441 | pub enum MergeBehaviour { | 441 | pub enum MergeBehavior { |
442 | /// Merge everything together creating deeply nested imports. | 442 | /// Merge everything together creating deeply nested imports. |
443 | Full, | 443 | Full, |
444 | /// Only merge the last import level, doesn't allow import nesting. | 444 | /// Only merge the last import level, doesn't allow import nesting. |
445 | Last, | 445 | Last, |
446 | } | 446 | } |
447 | 447 | ||
448 | impl MergeBehaviour { | 448 | impl MergeBehavior { |
449 | #[inline] | 449 | #[inline] |
450 | fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool { | 450 | fn is_tree_allowed(&self, tree: &ast::UseTree) -> bool { |
451 | match self { | 451 | match self { |
452 | MergeBehaviour::Full => true, | 452 | MergeBehavior::Full => true, |
453 | // only simple single segment paths are allowed | 453 | // only simple single segment paths are allowed |
454 | MergeBehaviour::Last => { | 454 | MergeBehavior::Last => { |
455 | tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1) | 455 | tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1) |
456 | } | 456 | } |
457 | } | 457 | } |
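After the rename, callers spell the enum `MergeBehavior`; `Full` still allows nested merges while `Last` only merges the final path level. A minimal sketch of a merge call, assuming the parsing approach used by the tests below (the fixture strings are illustrative):

    use ide_db::helpers::insert_use::{try_merge_imports, MergeBehavior};
    use syntax::{ast, AstNode};

    fn first_use(source: &str) -> ast::Use {
        let file = ast::SourceFile::parse(source).tree();
        file.syntax().descendants().find_map(ast::Use::cast).unwrap()
    }

    fn main() {
        let lhs = first_use("use foo::bar;");
        let rhs = first_use("use foo::baz;");
        // With `MergeBehavior::Full` the two imports can be merged into one
        // (e.g. `use foo::{bar, baz};`); `MergeBehavior::Last` is stricter.
        assert!(try_merge_imports(&lhs, &rhs, MergeBehavior::Full).is_some());
    }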
diff --git a/crates/ide_db/src/helpers/insert_use/tests.rs b/crates/ide_db/src/helpers/insert_use/tests.rs index 86bfa5b41..9e194354e 100644 --- a/crates/ide_db/src/helpers/insert_use/tests.rs +++ b/crates/ide_db/src/helpers/insert_use/tests.rs | |||
@@ -533,7 +533,7 @@ fn merge_last_fail() { | |||
533 | check_merge_only_fail( | 533 | check_merge_only_fail( |
534 | r"use foo::bar::{baz::{Qux, Fez}};", | 534 | r"use foo::bar::{baz::{Qux, Fez}};", |
535 | r"use foo::bar::{baaz::{Quux, Feez}};", | 535 | r"use foo::bar::{baaz::{Quux, Feez}};", |
536 | MergeBehaviour::Last, | 536 | MergeBehavior::Last, |
537 | ); | 537 | ); |
538 | } | 538 | } |
539 | 539 | ||
@@ -542,7 +542,7 @@ fn merge_last_fail1() { | |||
542 | check_merge_only_fail( | 542 | check_merge_only_fail( |
543 | r"use foo::bar::{baz::{Qux, Fez}};", | 543 | r"use foo::bar::{baz::{Qux, Fez}};", |
544 | r"use foo::bar::baaz::{Quux, Feez};", | 544 | r"use foo::bar::baaz::{Quux, Feez};", |
545 | MergeBehaviour::Last, | 545 | MergeBehavior::Last, |
546 | ); | 546 | ); |
547 | } | 547 | } |
548 | 548 | ||
@@ -551,7 +551,7 @@ fn merge_last_fail2() { | |||
551 | check_merge_only_fail( | 551 | check_merge_only_fail( |
552 | r"use foo::bar::baz::{Qux, Fez};", | 552 | r"use foo::bar::baz::{Qux, Fez};", |
553 | r"use foo::bar::{baaz::{Quux, Feez}};", | 553 | r"use foo::bar::{baaz::{Quux, Feez}};", |
554 | MergeBehaviour::Last, | 554 | MergeBehavior::Last, |
555 | ); | 555 | ); |
556 | } | 556 | } |
557 | 557 | ||
@@ -560,7 +560,7 @@ fn merge_last_fail3() { | |||
560 | check_merge_only_fail( | 560 | check_merge_only_fail( |
561 | r"use foo::bar::baz::{Qux, Fez};", | 561 | r"use foo::bar::baz::{Qux, Fez};", |
562 | r"use foo::bar::baaz::{Quux, Feez};", | 562 | r"use foo::bar::baaz::{Quux, Feez};", |
563 | MergeBehaviour::Last, | 563 | MergeBehavior::Last, |
564 | ); | 564 | ); |
565 | } | 565 | } |
566 | 566 | ||
@@ -568,7 +568,7 @@ fn check( | |||
568 | path: &str, | 568 | path: &str, |
569 | ra_fixture_before: &str, | 569 | ra_fixture_before: &str, |
570 | ra_fixture_after: &str, | 570 | ra_fixture_after: &str, |
571 | mb: Option<MergeBehaviour>, | 571 | mb: Option<MergeBehavior>, |
572 | module: bool, | 572 | module: bool, |
573 | ) { | 573 | ) { |
574 | let mut syntax = ast::SourceFile::parse(ra_fixture_before).tree().syntax().clone(); | 574 | let mut syntax = ast::SourceFile::parse(ra_fixture_before).tree().syntax().clone(); |
@@ -589,18 +589,18 @@ fn check( | |||
589 | } | 589 | } |
590 | 590 | ||
591 | fn check_full(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) { | 591 | fn check_full(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) { |
592 | check(path, ra_fixture_before, ra_fixture_after, Some(MergeBehaviour::Full), false) | 592 | check(path, ra_fixture_before, ra_fixture_after, Some(MergeBehavior::Full), false) |
593 | } | 593 | } |
594 | 594 | ||
595 | fn check_last(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) { | 595 | fn check_last(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) { |
596 | check(path, ra_fixture_before, ra_fixture_after, Some(MergeBehaviour::Last), false) | 596 | check(path, ra_fixture_before, ra_fixture_after, Some(MergeBehavior::Last), false) |
597 | } | 597 | } |
598 | 598 | ||
599 | fn check_none(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) { | 599 | fn check_none(path: &str, ra_fixture_before: &str, ra_fixture_after: &str) { |
600 | check(path, ra_fixture_before, ra_fixture_after, None, false) | 600 | check(path, ra_fixture_before, ra_fixture_after, None, false) |
601 | } | 601 | } |
602 | 602 | ||
603 | fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehaviour) { | 603 | fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior) { |
604 | let use0 = ast::SourceFile::parse(ra_fixture0) | 604 | let use0 = ast::SourceFile::parse(ra_fixture0) |
605 | .tree() | 605 | .tree() |
606 | .syntax() | 606 | .syntax() |
diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs index fceaa089a..118c090d7 100644 --- a/crates/ide_db/src/lib.rs +++ b/crates/ide_db/src/lib.rs | |||
@@ -19,8 +19,8 @@ use std::{fmt, sync::Arc}; | |||
19 | 19 | ||
20 | use base_db::{ | 20 | use base_db::{ |
21 | salsa::{self, Durability}, | 21 | salsa::{self, Durability}, |
22 | Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, | 22 | AnchoredPath, Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, |
23 | Upcast, | 23 | SourceDatabase, Upcast, |
24 | }; | 24 | }; |
25 | use hir::db::{AstDatabase, DefDatabase, HirDatabase}; | 25 | use hir::db::{AstDatabase, DefDatabase, HirDatabase}; |
26 | use rustc_hash::FxHashSet; | 26 | use rustc_hash::FxHashSet; |
@@ -72,8 +72,8 @@ impl FileLoader for RootDatabase { | |||
72 | fn file_text(&self, file_id: FileId) -> Arc<String> { | 72 | fn file_text(&self, file_id: FileId) -> Arc<String> { |
73 | FileLoaderDelegate(self).file_text(file_id) | 73 | FileLoaderDelegate(self).file_text(file_id) |
74 | } | 74 | } |
75 | fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | 75 | fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> { |
76 | FileLoaderDelegate(self).resolve_path(anchor, path) | 76 | FileLoaderDelegate(self).resolve_path(path) |
77 | } | 77 | } |
78 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { | 78 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { |
79 | FileLoaderDelegate(self).relevant_crates(file_id) | 79 | FileLoaderDelegate(self).relevant_crates(file_id) |
diff --git a/crates/ide_db/src/source_change.rs b/crates/ide_db/src/source_change.rs index f1590ec66..e87d98dad 100644 --- a/crates/ide_db/src/source_change.rs +++ b/crates/ide_db/src/source_change.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | //! | 3 | //! |
4 | //! It can be viewed as a dual for `AnalysisChange`. | 4 | //! It can be viewed as a dual for `AnalysisChange`. |
5 | 5 | ||
6 | use base_db::FileId; | 6 | use base_db::{AnchoredPathBuf, FileId}; |
7 | use text_edit::TextEdit; | 7 | use text_edit::TextEdit; |
8 | 8 | ||
9 | #[derive(Default, Debug, Clone)] | 9 | #[derive(Default, Debug, Clone)] |
@@ -44,8 +44,8 @@ impl From<Vec<SourceFileEdit>> for SourceChange { | |||
44 | 44 | ||
45 | #[derive(Debug, Clone)] | 45 | #[derive(Debug, Clone)] |
46 | pub enum FileSystemEdit { | 46 | pub enum FileSystemEdit { |
47 | CreateFile { anchor: FileId, dst: String }, | 47 | CreateFile { dst: AnchoredPathBuf }, |
48 | MoveFile { src: FileId, anchor: FileId, dst: String }, | 48 | MoveFile { src: FileId, dst: AnchoredPathBuf }, |
49 | } | 49 | } |
50 | 50 | ||
51 | impl From<FileSystemEdit> for SourceChange { | 51 | impl From<FileSystemEdit> for SourceChange { |
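Both `FileSystemEdit` variants now carry an `AnchoredPathBuf` instead of separate `anchor`/`dst` fields. A sketch of building the move edit, with the field values taken from the `rename_mod` hunk earlier (the variables are assumed to be in scope):

    let dst = AnchoredPathBuf { anchor: file_id, path: format!("{}.rs", new_name) };
    let move_file = FileSystemEdit::MoveFile { src: file_id, dst };
    // `CreateFile` is built the same way: FileSystemEdit::CreateFile { dst }.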
diff --git a/crates/proc_macro_srv/src/lib.rs b/crates/proc_macro_srv/src/lib.rs index 6e890f8e2..9cca96994 100644 --- a/crates/proc_macro_srv/src/lib.rs +++ b/crates/proc_macro_srv/src/lib.rs | |||
@@ -40,7 +40,8 @@ impl ProcMacroSrv { | |||
40 | match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { | 40 | match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { |
41 | Ok(expansion) => Ok(ExpansionResult { expansion }), | 41 | Ok(expansion) => Ok(ExpansionResult { expansion }), |
42 | Err(msg) => { | 42 | Err(msg) => { |
43 | Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) | 43 | let msg = msg.as_str().unwrap_or("<unknown error>"); |
44 | Err(format!("proc-macro panicked: {}", msg)) | ||
44 | } | 45 | } |
45 | } | 46 | } |
46 | } | 47 | } |
diff --git a/crates/proc_macro_srv/src/rustc_server.rs b/crates/proc_macro_srv/src/rustc_server.rs index c5fe3591e..503f4c101 100644 --- a/crates/proc_macro_srv/src/rustc_server.rs +++ b/crates/proc_macro_srv/src/rustc_server.rs | |||
@@ -184,8 +184,7 @@ pub mod token_stream { | |||
184 | let (subtree, _token_map) = | 184 | let (subtree, _token_map) = |
185 | mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; | 185 | mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; |
186 | 186 | ||
187 | let tt: tt::TokenTree = subtree.into(); | 187 | Ok(TokenStream { subtree }) |
188 | Ok(tt.into()) | ||
189 | } | 188 | } |
190 | } | 189 | } |
191 | 190 | ||
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 0a055b039..039976e4b 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -27,7 +27,7 @@ pico-args = "0.3.1" | |||
27 | oorandom = "11.1.2" | 27 | oorandom = "11.1.2" |
28 | rustc-hash = "1.1.0" | 28 | rustc-hash = "1.1.0" |
29 | serde = { version = "1.0.106", features = ["derive"] } | 29 | serde = { version = "1.0.106", features = ["derive"] } |
30 | serde_json = "1.0.48" | 30 | serde_json = { version = "1.0.48", features = ["preserve_order"] } |
31 | threadpool = "1.7.1" | 31 | threadpool = "1.7.1" |
32 | rayon = "1.5" | 32 | rayon = "1.5" |
33 | mimalloc = { version = "0.1.19", default-features = false, optional = true } | 33 | mimalloc = { version = "0.1.19", default-features = false, optional = true } |
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs index 8ddf1e031..0a471154e 100644 --- a/crates/rust-analyzer/src/bin/args.rs +++ b/crates/rust-analyzer/src/bin/args.rs | |||
@@ -28,6 +28,7 @@ pub(crate) enum Command { | |||
28 | StructuredSearch { debug_snippet: Option<String>, patterns: Vec<SsrPattern> }, | 28 | StructuredSearch { debug_snippet: Option<String>, patterns: Vec<SsrPattern> }, |
29 | ProcMacro, | 29 | ProcMacro, |
30 | RunServer, | 30 | RunServer, |
31 | PrintConfigSchema, | ||
31 | Version, | 32 | Version, |
32 | Help, | 33 | Help, |
33 | } | 34 | } |
@@ -135,6 +136,10 @@ impl Args { | |||
135 | return Ok(Args { verbosity, log_file: None, command: Command::Help }); | 136 | return Ok(Args { verbosity, log_file: None, command: Command::Help }); |
136 | } | 137 | } |
137 | 138 | ||
139 | if matches.contains("--print-config-schema") { | ||
140 | return Ok(Args { verbosity, log_file, command: Command::PrintConfigSchema }); | ||
141 | } | ||
142 | |||
138 | let subcommand = match matches.subcommand()? { | 143 | let subcommand = match matches.subcommand()? { |
139 | Some(it) => it, | 144 | Some(it) => it, |
140 | None => { | 145 | None => { |
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 21fba8302..defdcbd74 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs | |||
@@ -31,6 +31,9 @@ fn try_main() -> Result<()> { | |||
31 | setup_logging(args.log_file)?; | 31 | setup_logging(args.log_file)?; |
32 | match args.command { | 32 | match args.command { |
33 | args::Command::RunServer => run_server()?, | 33 | args::Command::RunServer => run_server()?, |
34 | args::Command::PrintConfigSchema => { | ||
35 | println!("{:#}", Config::json_schema()); | ||
36 | } | ||
34 | args::Command::ProcMacro => proc_macro_srv::cli::run()?, | 37 | args::Command::ProcMacro => proc_macro_srv::cli::run()?, |
35 | 38 | ||
36 | args::Command::Parse { no_dump } => cli::parse(no_dump)?, | 39 | args::Command::Parse { no_dump } => cli::parse(no_dump)?, |
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 98ef0cd68..58d284d47 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs | |||
@@ -8,7 +8,7 @@ use std::{ | |||
8 | 8 | ||
9 | use hir::{ | 9 | use hir::{ |
10 | db::{AstDatabase, DefDatabase, HirDatabase}, | 10 | db::{AstDatabase, DefDatabase, HirDatabase}, |
11 | original_range, AssocItem, Crate, HasSource, HirDisplay, ModuleDef, | 11 | AssocItem, Crate, HasSource, HirDisplay, ModuleDef, |
12 | }; | 12 | }; |
13 | use hir_def::FunctionId; | 13 | use hir_def::FunctionId; |
14 | use hir_ty::{Ty, TypeWalk}; | 14 | use hir_ty::{Ty, TypeWalk}; |
@@ -232,7 +232,7 @@ impl AnalysisStatsCmd { | |||
232 | // But also, we should just turn the type mismatches into diagnostics and provide these | 232 | // But also, we should just turn the type mismatches into diagnostics and provide these |
233 | let root = db.parse_or_expand(src.file_id).unwrap(); | 233 | let root = db.parse_or_expand(src.file_id).unwrap(); |
234 | let node = src.map(|e| e.to_node(&root).syntax().clone()); | 234 | let node = src.map(|e| e.to_node(&root).syntax().clone()); |
235 | let original_range = original_range(db, node.as_ref()); | 235 | let original_range = node.as_ref().original_file_range(db); |
236 | let path = vfs.file_path(original_range.file_id); | 236 | let path = vfs.file_path(original_range.file_id); |
237 | let line_index = | 237 | let line_index = |
238 | host.analysis().file_line_index(original_range.file_id).unwrap(); | 238 | host.analysis().file_line_index(original_range.file_id).unwrap(); |
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index d51f4a93a..e5ab6c73b 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs | |||
@@ -73,7 +73,7 @@ fn load( | |||
73 | } | 73 | } |
74 | vfs::loader::Message::Loaded { files } => { | 74 | vfs::loader::Message::Loaded { files } => { |
75 | for (path, contents) in files { | 75 | for (path, contents) in files { |
76 | vfs.set_file_contents(path.into(), contents) | 76 | vfs.set_file_contents(path.into(), contents); |
77 | } | 77 | } |
78 | } | 78 | } |
79 | } | 79 | } |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 5243b50c8..1f4b5c24c 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -12,15 +12,167 @@ use std::{convert::TryFrom, ffi::OsString, path::PathBuf}; | |||
12 | use flycheck::FlycheckConfig; | 12 | use flycheck::FlycheckConfig; |
13 | use hir::PrefixKind; | 13 | use hir::PrefixKind; |
14 | use ide::{AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig}; | 14 | use ide::{AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig}; |
15 | use ide_db::helpers::insert_use::MergeBehaviour; | 15 | use ide_db::helpers::insert_use::MergeBehavior; |
16 | use itertools::Itertools; | ||
16 | use lsp_types::{ClientCapabilities, MarkupKind}; | 17 | use lsp_types::{ClientCapabilities, MarkupKind}; |
17 | use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; | 18 | use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; |
18 | use rustc_hash::FxHashSet; | 19 | use rustc_hash::FxHashSet; |
19 | use serde::Deserialize; | 20 | use serde::{de::DeserializeOwned, Deserialize}; |
20 | use vfs::AbsPathBuf; | 21 | use vfs::AbsPathBuf; |
21 | 22 | ||
22 | use crate::{caps::enabled_completions_resolve_capabilities, diagnostics::DiagnosticsMapConfig}; | 23 | use crate::{caps::enabled_completions_resolve_capabilities, diagnostics::DiagnosticsMapConfig}; |
23 | 24 | ||
25 | config_data! { | ||
26 | struct ConfigData { | ||
27 | /// The strategy to use when inserting new imports or merging imports. | ||
28 | assist_importMergeBehaviour: MergeBehaviorDef = "\"full\"", | ||
29 | /// The path structure for newly inserted paths to use. | ||
30 | assist_importPrefix: ImportPrefixDef = "\"plain\"", | ||
31 | |||
32 | /// Show function name and docs in parameter hints. | ||
33 | callInfo_full: bool = "true", | ||
34 | |||
35 | /// Automatically refresh project info via `cargo metadata` on | ||
36 | /// Cargo.toml changes. | ||
37 | cargo_autoreload: bool = "true", | ||
38 | /// Activate all available features. | ||
39 | cargo_allFeatures: bool = "false", | ||
40 | /// List of features to activate. | ||
41 | cargo_features: Vec<String> = "[]", | ||
42 | /// Run `cargo check` on startup to get the correct value for package | ||
43 | /// OUT_DIRs. | ||
44 | cargo_loadOutDirsFromCheck: bool = "false", | ||
45 | /// Do not activate the `default` feature. | ||
46 | cargo_noDefaultFeatures: bool = "false", | ||
47 | /// Compilation target (target triple). | ||
48 | cargo_target: Option<String> = "null", | ||
49 | /// Internal config for debugging, disables loading of sysroot crates. | ||
50 | cargo_noSysroot: bool = "false", | ||
51 | |||
52 | /// Run specified `cargo check` command for diagnostics on save. | ||
53 | checkOnSave_enable: bool = "true", | ||
54 | /// Check with all features (will be passed as `--all-features`). | ||
55 | /// Defaults to `rust-analyzer.cargo.allFeatures`. | ||
56 | checkOnSave_allFeatures: Option<bool> = "null", | ||
57 | /// Check all targets and tests (will be passed as `--all-targets`). | ||
58 | checkOnSave_allTargets: bool = "true", | ||
59 | /// Cargo command to use for `cargo check`. | ||
60 | checkOnSave_command: String = "\"check\"", | ||
61 | /// Do not activate the `default` feature. | ||
62 | checkOnSave_noDefaultFeatures: Option<bool> = "null", | ||
63 | /// Check for a specific target. Defaults to | ||
64 | /// `rust-analyzer.cargo.target`. | ||
65 | checkOnSave_target: Option<String> = "null", | ||
66 | /// Extra arguments for `cargo check`. | ||
67 | checkOnSave_extraArgs: Vec<String> = "[]", | ||
68 | /// List of features to activate. Defaults to | ||
69 | /// `rust-analyzer.cargo.features`. | ||
70 | checkOnSave_features: Option<Vec<String>> = "null", | ||
71 | /// Advanced option, fully override the command rust-analyzer uses for | ||
72 | /// checking. The command should include `--message-format=json` or | ||
73 | /// similar option. | ||
74 | checkOnSave_overrideCommand: Option<Vec<String>> = "null", | ||
75 | |||
76 | /// Whether to add argument snippets when completing functions. | ||
77 | completion_addCallArgumentSnippets: bool = "true", | ||
78 | /// Whether to add parentheses when completing functions. | ||
79 | completion_addCallParenthesis: bool = "true", | ||
80 | /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. | ||
81 | completion_postfix_enable: bool = "true", | ||
82 | /// Toggles the additional completions that automatically add imports when completed. | ||
83 | /// Note that your client has to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. | ||
84 | completion_autoimport_enable: bool = "true", | ||
85 | |||
86 | /// Whether to show native rust-analyzer diagnostics. | ||
87 | diagnostics_enable: bool = "true", | ||
88 | /// Whether to show experimental rust-analyzer diagnostics that might | ||
89 | /// have more false positives than usual. | ||
90 | diagnostics_enableExperimental: bool = "true", | ||
91 | /// List of rust-analyzer diagnostics to disable. | ||
92 | diagnostics_disabled: FxHashSet<String> = "[]", | ||
93 | /// List of warnings that should be displayed with hint severity.\nThe | ||
94 | /// warnings will be indicated by faded text or three dots in code and | ||
95 | /// will not show up in the problems panel. | ||
96 | diagnostics_warningsAsHint: Vec<String> = "[]", | ||
97 | /// List of warnings that should be displayed with info severity.\nThe | ||
98 | /// warnings will be indicated by a blue squiggly underline in code and | ||
99 | /// a blue icon in the problems panel. | ||
100 | diagnostics_warningsAsInfo: Vec<String> = "[]", | ||
101 | |||
102 | /// Controls file watching implementation. | ||
103 | files_watcher: String = "\"client\"", | ||
104 | |||
105 | /// Whether to show `Debug` action. Only applies when | ||
106 | /// `#rust-analyzer.hoverActions.enable#` is set. | ||
107 | hoverActions_debug: bool = "true", | ||
108 | /// Whether to show HoverActions in Rust files. | ||
109 | hoverActions_enable: bool = "true", | ||
110 | /// Whether to show `Go to Type Definition` action. Only applies when | ||
111 | /// `#rust-analyzer.hoverActions.enable#` is set. | ||
112 | hoverActions_gotoTypeDef: bool = "true", | ||
113 | /// Whether to show `Implementations` action. Only applies when | ||
114 | /// `#rust-analyzer.hoverActions.enable#` is set. | ||
115 | hoverActions_implementations: bool = "true", | ||
116 | /// Whether to show `Run` action. Only applies when | ||
117 | /// `#rust-analyzer.hoverActions.enable#` is set. | ||
118 | hoverActions_run: bool = "true", | ||
119 | /// Use markdown syntax for links in hover. | ||
120 | hoverActions_linksInHover: bool = "true", | ||
121 | |||
122 | /// Whether to show inlay type hints for method chains. | ||
123 | inlayHints_chainingHints: bool = "true", | ||
124 | /// Maximum length for inlay hints. | ||
125 | inlayHints_maxLength: Option<usize> = "null", | ||
126 | /// Whether to show function parameter name inlay hints at the call | ||
127 | /// site. | ||
128 | inlayHints_parameterHints: bool = "true", | ||
129 | /// Whether to show inlay type hints for variables. | ||
130 | inlayHints_typeHints: bool = "true", | ||
131 | |||
132 | /// Whether to show `Debug` lens. Only applies when | ||
133 | /// `#rust-analyzer.lens.enable#` is set. | ||
134 | lens_debug: bool = "true", | ||
135 | /// Whether to show CodeLens in Rust files. | ||
136 | lens_enable: bool = "true", | ||
137 | /// Whether to show `Implementations` lens. Only applies when | ||
138 | /// `#rust-analyzer.lens.enable#` is set. | ||
139 | lens_implementations: bool = "true", | ||
140 | /// Whether to show `Run` lens. Only applies when | ||
141 | /// `#rust-analyzer.lens.enable#` is set. | ||
142 | lens_run: bool = "true", | ||
143 | /// Whether to show `Method References` lens. Only applies when | ||
144 | /// `#rust-analyzer.lens.enable#` is set. | ||
145 | lens_methodReferences: bool = "false", | ||
146 | |||
147 | /// Disable project auto-discovery in favor of an explicitly specified set | ||
148 | /// of projects. \nElements must be paths pointing to Cargo.toml, | ||
149 | /// rust-project.json, or JSON objects in rust-project.json format. | ||
150 | linkedProjects: Vec<ManifestOrProjectJson> = "[]", | ||
151 | /// Number of syntax trees rust-analyzer keeps in memory. | ||
152 | lruCapacity: Option<usize> = "null", | ||
153 | /// Whether to show `can't find Cargo.toml` error message. | ||
154 | notifications_cargoTomlNotFound: bool = "true", | ||
155 | /// Enable proc macro support; `cargo.loadOutDirsFromCheck` must be | ||
156 | /// enabled. | ||
157 | procMacro_enable: bool = "false", | ||
158 | |||
159 | /// Command to be executed instead of 'cargo' for runnables. | ||
160 | runnables_overrideCargo: Option<String> = "null", | ||
161 | /// Additional arguments to be passed to cargo for runnables such as | ||
162 | /// tests or binaries.\nFor example, it may be '--release'. | ||
163 | runnables_cargoExtraArgs: Vec<String> = "[]", | ||
164 | |||
165 | /// Path to the rust compiler sources, for usage in rustc_private projects. | ||
166 | rustcSource: Option<String> = "null", | ||
167 | |||
168 | /// Additional arguments to rustfmt. | ||
169 | rustfmt_extraArgs: Vec<String> = "[]", | ||
170 | /// Advanced option, fully override the command rust-analyzer uses for | ||
171 | /// formatting. | ||
172 | rustfmt_overrideCommand: Option<Vec<String>> = "null", | ||
173 | } | ||
174 | } | ||
175 | |||
24 | #[derive(Debug, Clone)] | 176 | #[derive(Debug, Clone)] |
25 | pub struct Config { | 177 | pub struct Config { |
26 | pub client_caps: ClientCapsConfig, | 178 | pub client_caps: ClientCapsConfig, |
@@ -46,6 +198,7 @@ pub struct Config { | |||
46 | pub lens: LensConfig, | 198 | pub lens: LensConfig, |
47 | pub hover: HoverConfig, | 199 | pub hover: HoverConfig, |
48 | pub semantic_tokens_refresh: bool, | 200 | pub semantic_tokens_refresh: bool, |
201 | pub code_lens_refresh: bool, | ||
49 | 202 | ||
50 | pub linked_projects: Vec<LinkedProject>, | 203 | pub linked_projects: Vec<LinkedProject>, |
51 | pub root_path: AbsPathBuf, | 204 | pub root_path: AbsPathBuf, |
@@ -149,25 +302,27 @@ pub struct ClientCapsConfig { | |||
149 | 302 | ||
150 | impl Config { | 303 | impl Config { |
151 | pub fn new(root_path: AbsPathBuf) -> Self { | 304 | pub fn new(root_path: AbsPathBuf) -> Self { |
152 | Config { | 305 | // Defaults here don't matter; we'll immediately rewrite them with |
306 | // ConfigData. | ||
307 | let mut res = Config { | ||
153 | client_caps: ClientCapsConfig::default(), | 308 | client_caps: ClientCapsConfig::default(), |
154 | 309 | ||
155 | publish_diagnostics: true, | 310 | publish_diagnostics: false, |
156 | diagnostics: DiagnosticsConfig::default(), | 311 | diagnostics: DiagnosticsConfig::default(), |
157 | diagnostics_map: DiagnosticsMapConfig::default(), | 312 | diagnostics_map: DiagnosticsMapConfig::default(), |
158 | lru_capacity: None, | 313 | lru_capacity: None, |
159 | proc_macro_srv: None, | 314 | proc_macro_srv: None, |
160 | files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() }, | 315 | files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() }, |
161 | notifications: NotificationsConfig { cargo_toml_not_found: true }, | 316 | notifications: NotificationsConfig { cargo_toml_not_found: false }, |
162 | 317 | ||
163 | cargo_autoreload: true, | 318 | cargo_autoreload: false, |
164 | cargo: CargoConfig::default(), | 319 | cargo: CargoConfig::default(), |
165 | rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() }, | 320 | rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() }, |
166 | flycheck: Some(FlycheckConfig::CargoCommand { | 321 | flycheck: Some(FlycheckConfig::CargoCommand { |
167 | command: "check".to_string(), | 322 | command: String::new(), |
168 | target_triple: None, | 323 | target_triple: None, |
169 | no_default_features: false, | 324 | no_default_features: false, |
170 | all_targets: true, | 325 | all_targets: false, |
171 | all_features: false, | 326 | all_features: false, |
172 | extra_args: Vec::new(), | 327 | extra_args: Vec::new(), |
173 | features: Vec::new(), | 328 | features: Vec::new(), |
@@ -175,35 +330,33 @@ impl Config { | |||
175 | runnables: RunnablesConfig::default(), | 330 | runnables: RunnablesConfig::default(), |
176 | 331 | ||
177 | inlay_hints: InlayHintsConfig { | 332 | inlay_hints: InlayHintsConfig { |
178 | type_hints: true, | 333 | type_hints: false, |
179 | parameter_hints: true, | 334 | parameter_hints: false, |
180 | chaining_hints: true, | 335 | chaining_hints: false, |
181 | max_length: None, | 336 | max_length: None, |
182 | }, | 337 | }, |
183 | completion: CompletionConfig { | 338 | completion: CompletionConfig::default(), |
184 | enable_postfix_completions: true, | ||
185 | enable_autoimport_completions: true, | ||
186 | add_call_parenthesis: true, | ||
187 | add_call_argument_snippets: true, | ||
188 | ..CompletionConfig::default() | ||
189 | }, | ||
190 | assist: AssistConfig::default(), | 339 | assist: AssistConfig::default(), |
191 | call_info_full: true, | 340 | call_info_full: false, |
192 | lens: LensConfig::default(), | 341 | lens: LensConfig::default(), |
193 | hover: HoverConfig::default(), | 342 | hover: HoverConfig::default(), |
194 | semantic_tokens_refresh: false, | 343 | semantic_tokens_refresh: false, |
344 | code_lens_refresh: false, | ||
195 | linked_projects: Vec::new(), | 345 | linked_projects: Vec::new(), |
196 | root_path, | 346 | root_path, |
197 | } | 347 | }; |
348 | res.do_update(serde_json::json!({})); | ||
349 | res | ||
198 | } | 350 | } |
199 | |||
200 | pub fn update(&mut self, json: serde_json::Value) { | 351 | pub fn update(&mut self, json: serde_json::Value) { |
201 | log::info!("Config::update({:#})", json); | 352 | log::info!("Config::update({:#})", json); |
202 | |||
203 | if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { | 353 | if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { |
204 | return; | 354 | return; |
205 | } | 355 | } |
206 | 356 | self.do_update(json); | |
357 | log::info!("Config::update() = {:#?}", self); | ||
358 | } | ||
359 | fn do_update(&mut self, json: serde_json::Value) { | ||
207 | let data = ConfigData::from_json(json); | 360 | let data = ConfigData::from_json(json); |
208 | 361 | ||
209 | self.publish_diagnostics = data.diagnostics_enable; | 362 | self.publish_diagnostics = data.diagnostics_enable; |
@@ -294,9 +447,9 @@ impl Config { | |||
294 | }; | 447 | }; |
295 | 448 | ||
296 | self.assist.insert_use.merge = match data.assist_importMergeBehaviour { | 449 | self.assist.insert_use.merge = match data.assist_importMergeBehaviour { |
297 | MergeBehaviourDef::None => None, | 450 | MergeBehaviorDef::None => None, |
298 | MergeBehaviourDef::Full => Some(MergeBehaviour::Full), | 451 | MergeBehaviorDef::Full => Some(MergeBehavior::Full), |
299 | MergeBehaviourDef::Last => Some(MergeBehaviour::Last), | 452 | MergeBehaviorDef::Last => Some(MergeBehavior::Last), |
300 | }; | 453 | }; |
301 | self.assist.insert_use.prefix_kind = match data.assist_importPrefix { | 454 | self.assist.insert_use.prefix_kind = match data.assist_importPrefix { |
302 | ImportPrefixDef::Plain => PrefixKind::Plain, | 455 | ImportPrefixDef::Plain => PrefixKind::Plain, |
@@ -349,8 +502,6 @@ impl Config { | |||
349 | links_in_hover: data.hoverActions_linksInHover, | 502 | links_in_hover: data.hoverActions_linksInHover, |
350 | markdown: true, | 503 | markdown: true, |
351 | }; | 504 | }; |
352 | |||
353 | log::info!("Config::update() = {:#?}", self); | ||
354 | } | 505 | } |
355 | 506 | ||
356 | pub fn update_caps(&mut self, caps: &ClientCapabilities) { | 507 | pub fn update_caps(&mut self, caps: &ClientCapabilities) { |
@@ -432,8 +583,18 @@ impl Config { | |||
432 | { | 583 | { |
433 | self.semantic_tokens_refresh = refresh_support; | 584 | self.semantic_tokens_refresh = refresh_support; |
434 | } | 585 | } |
586 | |||
587 | if let Some(refresh_support) = | ||
588 | workspace_caps.code_lens.as_ref().and_then(|it| it.refresh_support) | ||
589 | { | ||
590 | self.code_lens_refresh = refresh_support; | ||
591 | } | ||
435 | } | 592 | } |
436 | } | 593 | } |
594 | |||
595 | pub fn json_schema() -> serde_json::Value { | ||
596 | ConfigData::json_schema() | ||
597 | } | ||
437 | } | 598 | } |
438 | 599 | ||
439 | #[derive(Deserialize)] | 600 | #[derive(Deserialize)] |
@@ -445,7 +606,7 @@ enum ManifestOrProjectJson { | |||
445 | 606 | ||
446 | #[derive(Deserialize)] | 607 | #[derive(Deserialize)] |
447 | #[serde(rename_all = "snake_case")] | 608 | #[serde(rename_all = "snake_case")] |
448 | enum MergeBehaviourDef { | 609 | enum MergeBehaviorDef { |
449 | None, | 610 | None, |
450 | Full, | 611 | Full, |
451 | Last, | 612 | Last, |
@@ -459,94 +620,206 @@ enum ImportPrefixDef { | |||
459 | ByCrate, | 620 | ByCrate, |
460 | } | 621 | } |
461 | 622 | ||
462 | macro_rules! config_data { | 623 | macro_rules! _config_data { |
463 | (struct $name:ident { $($field:ident: $ty:ty = $default:expr,)*}) => { | 624 | (struct $name:ident { |
625 | $( | ||
626 | $(#[doc=$doc:literal])* | ||
627 | $field:ident: $ty:ty = $default:expr, | ||
628 | )* | ||
629 | }) => { | ||
464 | #[allow(non_snake_case)] | 630 | #[allow(non_snake_case)] |
465 | struct $name { $($field: $ty,)* } | 631 | struct $name { $($field: $ty,)* } |
466 | impl $name { | 632 | impl $name { |
467 | fn from_json(mut json: serde_json::Value) -> $name { | 633 | fn from_json(mut json: serde_json::Value) -> $name { |
468 | $name {$( | 634 | $name {$( |
469 | $field: { | 635 | $field: get_field(&mut json, stringify!($field), $default), |
470 | let pointer = stringify!($field).replace('_', "/"); | ||
471 | let pointer = format!("/{}", pointer); | ||
472 | json.pointer_mut(&pointer) | ||
473 | .and_then(|it| serde_json::from_value(it.take()).ok()) | ||
474 | .unwrap_or($default) | ||
475 | }, | ||
476 | )*} | 636 | )*} |
477 | } | 637 | } |
478 | } | ||
479 | 638 | ||
639 | fn json_schema() -> serde_json::Value { | ||
640 | schema(&[ | ||
641 | $({ | ||
642 | let field = stringify!($field); | ||
643 | let ty = stringify!($ty); | ||
644 | (field, ty, &[$($doc),*], $default) | ||
645 | },)* | ||
646 | ]) | ||
647 | } | ||
648 | |||
649 | #[cfg(test)] | ||
650 | fn manual() -> String { | ||
651 | manual(&[ | ||
652 | $({ | ||
653 | let field = stringify!($field); | ||
654 | let ty = stringify!($ty); | ||
655 | (field, ty, &[$($doc),*], $default) | ||
656 | },)* | ||
657 | ]) | ||
658 | } | ||
659 | } | ||
480 | }; | 660 | }; |
481 | } | 661 | } |
662 | use _config_data as config_data; | ||
663 | |||
664 | fn get_field<T: DeserializeOwned>( | ||
665 | json: &mut serde_json::Value, | ||
666 | field: &'static str, | ||
667 | default: &str, | ||
668 | ) -> T { | ||
669 | let default = serde_json::from_str(default).unwrap(); | ||
670 | |||
671 | let mut pointer = field.replace('_', "/"); | ||
672 | pointer.insert(0, '/'); | ||
673 | json.pointer_mut(&pointer) | ||
674 | .and_then(|it| serde_json::from_value(it.take()).ok()) | ||
675 | .unwrap_or(default) | ||
676 | } | ||
482 | 677 | ||
483 | config_data! { | 678 | fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value { |
484 | struct ConfigData { | 679 | for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) { |
485 | assist_importMergeBehaviour: MergeBehaviourDef = MergeBehaviourDef::Full, | 680 | fn key(f: &str) -> &str { |
486 | assist_importPrefix: ImportPrefixDef = ImportPrefixDef::Plain, | 681 | f.splitn(2, "_").next().unwrap() |
487 | 682 | }; | |
488 | callInfo_full: bool = true, | 683 | assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2); |
489 | 684 | } | |
490 | cargo_autoreload: bool = true, | 685 | |
491 | cargo_allFeatures: bool = false, | 686 | let map = fields |
492 | cargo_features: Vec<String> = Vec::new(), | 687 | .iter() |
493 | cargo_loadOutDirsFromCheck: bool = false, | 688 | .map(|(field, ty, doc, default)| { |
494 | cargo_noDefaultFeatures: bool = false, | 689 | let name = field.replace("_", "."); |
495 | cargo_target: Option<String> = None, | 690 | let name = format!("rust-analyzer.{}", name); |
496 | cargo_noSysroot: bool = false, | 691 | let props = field_props(field, ty, doc, default); |
497 | 692 | (name, props) | |
498 | checkOnSave_enable: bool = true, | 693 | }) |
499 | checkOnSave_allFeatures: Option<bool> = None, | 694 | .collect::<serde_json::Map<_, _>>(); |
500 | checkOnSave_allTargets: bool = true, | 695 | map.into() |
501 | checkOnSave_command: String = "check".into(), | 696 | } |
502 | checkOnSave_noDefaultFeatures: Option<bool> = None, | 697 | |
503 | checkOnSave_target: Option<String> = None, | 698 | fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value { |
504 | checkOnSave_extraArgs: Vec<String> = Vec::new(), | 699 | let doc = doc.iter().map(|it| it.trim()).join(" "); |
505 | checkOnSave_features: Option<Vec<String>> = None, | 700 | assert!( |
506 | checkOnSave_overrideCommand: Option<Vec<String>> = None, | 701 | doc.ends_with('.') && doc.starts_with(char::is_uppercase), |
507 | 702 | "bad docs for {}: {:?}", | |
508 | completion_addCallArgumentSnippets: bool = true, | 703 | field, |
509 | completion_addCallParenthesis: bool = true, | 704 | doc |
510 | completion_postfix_enable: bool = true, | 705 | ); |
511 | completion_autoimport_enable: bool = true, | 706 | let default = default.parse::<serde_json::Value>().unwrap(); |
512 | 707 | ||
513 | diagnostics_enable: bool = true, | 708 | let mut map = serde_json::Map::default(); |
514 | diagnostics_enableExperimental: bool = true, | 709 | macro_rules! set { |
515 | diagnostics_disabled: FxHashSet<String> = FxHashSet::default(), | 710 | ($($key:literal: $value:tt),*$(,)?) => {{$( |
516 | diagnostics_warningsAsHint: Vec<String> = Vec::new(), | 711 | map.insert($key.into(), serde_json::json!($value)); |
517 | diagnostics_warningsAsInfo: Vec<String> = Vec::new(), | 712 | )*}}; |
518 | 713 | } | |
519 | files_watcher: String = "client".into(), | 714 | set!("markdownDescription": doc); |
520 | 715 | set!("default": default); | |
521 | hoverActions_debug: bool = true, | 716 | |
522 | hoverActions_enable: bool = true, | 717 | match ty { |
523 | hoverActions_gotoTypeDef: bool = true, | 718 | "bool" => set!("type": "boolean"), |
524 | hoverActions_implementations: bool = true, | 719 | "String" => set!("type": "string"), |
525 | hoverActions_run: bool = true, | 720 | "Vec<String>" => set! { |
526 | hoverActions_linksInHover: bool = true, | 721 | "type": "array", |
527 | 722 | "items": { "type": "string" }, | |
528 | inlayHints_chainingHints: bool = true, | 723 | }, |
529 | inlayHints_maxLength: Option<usize> = None, | 724 | "FxHashSet<String>" => set! { |
530 | inlayHints_parameterHints: bool = true, | 725 | "type": "array", |
531 | inlayHints_typeHints: bool = true, | 726 | "items": { "type": "string" }, |
532 | 727 | "uniqueItems": true, | |
533 | lens_debug: bool = true, | 728 | }, |
534 | lens_enable: bool = true, | 729 | "Option<usize>" => set! { |
535 | lens_implementations: bool = true, | 730 | "type": ["null", "integer"], |
536 | lens_run: bool = true, | 731 | "minimum": 0, |
537 | lens_methodReferences: bool = false, | 732 | }, |
538 | 733 | "Option<String>" => set! { | |
539 | linkedProjects: Vec<ManifestOrProjectJson> = Vec::new(), | 734 | "type": ["null", "string"], |
540 | lruCapacity: Option<usize> = None, | 735 | }, |
541 | notifications_cargoTomlNotFound: bool = true, | 736 | "Option<bool>" => set! { |
542 | procMacro_enable: bool = false, | 737 | "type": ["null", "boolean"], |
543 | 738 | }, | |
544 | runnables_overrideCargo: Option<String> = None, | 739 | "Option<Vec<String>>" => set! { |
545 | runnables_cargoExtraArgs: Vec<String> = Vec::new(), | 740 | "type": ["null", "array"], |
546 | 741 | "items": { "type": "string" }, | |
547 | rustfmt_extraArgs: Vec<String> = Vec::new(), | 742 | }, |
548 | rustfmt_overrideCommand: Option<Vec<String>> = None, | 743 | "MergeBehaviorDef" => set! { |
549 | 744 | "type": "string", | |
550 | rustcSource : Option<String> = None, | 745 | "enum": ["none", "full", "last"], |
746 | "enumDescriptions": [ | ||
747 | "No merging", | ||
748 | "Merge all layers of the import trees", | ||
749 | "Only merge the last layer of the import trees" | ||
750 | ], | ||
751 | }, | ||
752 | "ImportPrefixDef" => set! { | ||
753 | "type": "string", | ||
754 | "enum": [ | ||
755 | "plain", | ||
756 | "by_self", | ||
757 | "by_crate" | ||
758 | ], | ||
759 | "enumDescriptions": [ | ||
760 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", | ||
761 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name", | ||
762 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." | ||
763 | ], | ||
764 | }, | ||
765 | "Vec<ManifestOrProjectJson>" => set! { | ||
766 | "type": "array", | ||
767 | "items": { "type": ["string", "object"] }, | ||
768 | }, | ||
769 | _ => panic!("{}: {}", ty, default), | ||
770 | } | ||
771 | |||
772 | map.into() | ||
773 | } | ||
774 | |||
775 | #[cfg(test)] | ||
776 | fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String { | ||
777 | fields | ||
778 | .iter() | ||
779 | .map(|(field, _ty, doc, default)| { | ||
780 | let name = field.replace("_", "."); | ||
781 | let name = format!("rust-analyzer.{} (default: `{}`)", name, default); | ||
782 | format!("{}::\n{}\n", name, doc.join(" ")) | ||
783 | }) | ||
784 | .collect::<String>() | ||
785 | } | ||
786 | |||
787 | #[cfg(test)] | ||
788 | mod tests { | ||
789 | use std::fs; | ||
790 | |||
791 | use test_utils::project_dir; | ||
792 | |||
793 | use super::*; | ||
794 | |||
795 | #[test] | ||
796 | fn schema_in_sync_with_package_json() { | ||
797 | let s = Config::json_schema(); | ||
798 | let schema = format!("{:#}", s); | ||
799 | let schema = schema.trim_start_matches('{').trim_end_matches('}'); | ||
800 | |||
801 | let package_json = project_dir().join("editors/code/package.json"); | ||
802 | let package_json = fs::read_to_string(&package_json).unwrap(); | ||
803 | |||
804 | let p = remove_ws(&package_json); | ||
805 | let s = remove_ws(&schema); | ||
806 | |||
807 | assert!(p.contains(&s), "update config in package.json. New config:\n{:#}", schema); | ||
808 | } | ||
809 | |||
810 | #[test] | ||
811 | fn schema_in_sync_with_docs() { | ||
812 | let docs_path = project_dir().join("docs/user/generated_config.adoc"); | ||
813 | let current = fs::read_to_string(&docs_path).unwrap(); | ||
814 | let expected = ConfigData::manual(); | ||
815 | |||
816 | if remove_ws(&current) != remove_ws(&expected) { | ||
817 | fs::write(&docs_path, expected).unwrap(); | ||
818 | panic!("updated config manual"); | ||
819 | } | ||
820 | } | ||
821 | |||
822 | fn remove_ws(text: &str) -> String { | ||
823 | text.replace(char::is_whitespace, "") | ||
551 | } | 824 | } |
552 | } | 825 | } |
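
A note on the `config_data!` plumbing above: client settings arrive as a single nested JSON object, and each field name such as `cargo_loadOutDirsFromCheck` is turned into a JSON pointer (`/cargo/loadOutDirsFromCheck`) by `get_field`, with the string-encoded default as the fallback. A minimal, self-contained sketch of that lookup (assuming `serde` and `serde_json` as dependencies; this is not the macro expansion itself):

```rust
use serde::de::DeserializeOwned;

// Mirrors `get_field` from the hunk above: "cargo_loadOutDirsFromCheck" becomes
// the JSON pointer "/cargo/loadOutDirsFromCheck"; a missing or ill-typed value
// falls back to the string-encoded default.
fn get_field<T: DeserializeOwned>(
    json: &mut serde_json::Value,
    field: &'static str,
    default: &str,
) -> T {
    let default = serde_json::from_str(default).unwrap();
    let pointer = format!("/{}", field.replace('_', "/"));
    json.pointer_mut(&pointer)
        .and_then(|it| serde_json::from_value(it.take()).ok())
        .unwrap_or(default)
}

fn main() {
    let mut settings = serde_json::json!({
        "cargo": { "loadOutDirsFromCheck": true }
    });
    let load_out_dirs: bool = get_field(&mut settings, "cargo_loadOutDirsFromCheck", "false");
    assert!(load_out_dirs);

    // Absent fields take the default, exactly as in `ConfigData::from_json`.
    let all_features: bool = get_field(&mut settings, "cargo_allFeatures", "false");
    assert!(!all_features);
}
```

Keeping the defaults as JSON strings is what lets one table drive `from_json`, the generated `package.json` schema, and the `generated_config.adoc` manual that the tests above keep in sync.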
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index a27495d0d..71dc56915 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs | |||
@@ -13,6 +13,7 @@ use lsp_types::{SemanticTokens, Url}; | |||
13 | use parking_lot::{Mutex, RwLock}; | 13 | use parking_lot::{Mutex, RwLock}; |
14 | use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; | 14 | use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; |
15 | use rustc_hash::FxHashMap; | 15 | use rustc_hash::FxHashMap; |
16 | use vfs::AnchoredPathBuf; | ||
16 | 17 | ||
17 | use crate::{ | 18 | use crate::{ |
18 | config::Config, | 19 | config::Config, |
@@ -268,10 +269,10 @@ impl GlobalStateSnapshot { | |||
268 | Some(self.mem_docs.get(&path)?.version) | 269 | Some(self.mem_docs.get(&path)?.version) |
269 | } | 270 | } |
270 | 271 | ||
271 | pub(crate) fn anchored_path(&self, file_id: FileId, path: &str) -> Url { | 272 | pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url { |
272 | let mut base = self.vfs.read().0.file_path(file_id); | 273 | let mut base = self.vfs.read().0.file_path(path.anchor); |
273 | base.pop(); | 274 | base.pop(); |
274 | let path = base.join(path).unwrap(); | 275 | let path = base.join(&path.path).unwrap(); |
275 | let path = path.as_path().unwrap(); | 276 | let path = path.as_path().unwrap(); |
276 | url_from_abs_path(&path) | 277 | url_from_abs_path(&path) |
277 | } | 278 | } |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 95be2ebd3..a5f7647b8 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -255,7 +255,7 @@ impl GlobalState { | |||
255 | for (path, contents) in files { | 255 | for (path, contents) in files { |
256 | let path = VfsPath::from(path); | 256 | let path = VfsPath::from(path); |
257 | if !self.mem_docs.contains_key(&path) { | 257 | if !self.mem_docs.contains_key(&path) { |
258 | vfs.set_file_contents(path, contents) | 258 | vfs.set_file_contents(path, contents); |
259 | } | 259 | } |
260 | } | 260 | } |
261 | } | 261 | } |
@@ -369,6 +369,11 @@ impl GlobalState { | |||
369 | self.semantic_tokens_cache.lock().clear(); | 369 | self.semantic_tokens_cache.lock().clear(); |
370 | self.send_request::<lsp_types::request::SemanticTokensRefesh>((), |_, _| ()); | 370 | self.send_request::<lsp_types::request::SemanticTokensRefesh>((), |_, _| ()); |
371 | } | 371 | } |
372 | |||
373 | // Refresh code lens if the client supports it. | ||
374 | if self.config.code_lens_refresh { | ||
375 | self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ()); | ||
376 | } | ||
372 | } | 377 | } |
373 | 378 | ||
374 | if let Some(diagnostic_changes) = self.diagnostics.take_changes() { | 379 | if let Some(diagnostic_changes) = self.diagnostics.take_changes() { |
@@ -503,11 +508,21 @@ impl GlobalState { | |||
503 | { | 508 | { |
504 | log::error!("duplicate DidOpenTextDocument: {}", path) | 509 | log::error!("duplicate DidOpenTextDocument: {}", path) |
505 | } | 510 | } |
506 | this.vfs | 511 | let changed = this |
512 | .vfs | ||
507 | .write() | 513 | .write() |
508 | .0 | 514 | .0 |
509 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); | 515 | .set_file_contents(path, Some(params.text_document.text.into_bytes())); |
510 | this.maybe_update_diagnostics(); | 516 | |
517 | // If the VFS contents are unchanged, update diagnostics, since `handle_event` | ||
518 | // won't see any changes. This avoids missing diagnostics when opening a file. | ||
519 | // | ||
520 | // If the file *was* changed, `handle_event` will already recompute and send | ||
521 | // diagnostics. We can't do it here, because the *current* file contents might be | ||
522 | // unset in salsa, since the VFS change hasn't been applied to the database yet. | ||
523 | if !changed { | ||
524 | this.maybe_update_diagnostics(); | ||
525 | } | ||
511 | } | 526 | } |
512 | Ok(()) | 527 | Ok(()) |
513 | })? | 528 | })? |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 01eabe852..715f8927a 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -628,17 +628,17 @@ pub(crate) fn resource_op( | |||
628 | file_system_edit: FileSystemEdit, | 628 | file_system_edit: FileSystemEdit, |
629 | ) -> lsp_types::ResourceOp { | 629 | ) -> lsp_types::ResourceOp { |
630 | match file_system_edit { | 630 | match file_system_edit { |
631 | FileSystemEdit::CreateFile { anchor, dst } => { | 631 | FileSystemEdit::CreateFile { dst } => { |
632 | let uri = snap.anchored_path(anchor, &dst); | 632 | let uri = snap.anchored_path(&dst); |
633 | lsp_types::ResourceOp::Create(lsp_types::CreateFile { | 633 | lsp_types::ResourceOp::Create(lsp_types::CreateFile { |
634 | uri, | 634 | uri, |
635 | options: None, | 635 | options: None, |
636 | annotation: None, | 636 | annotation: None, |
637 | }) | 637 | }) |
638 | } | 638 | } |
639 | FileSystemEdit::MoveFile { src, anchor, dst } => { | 639 | FileSystemEdit::MoveFile { src, dst } => { |
640 | let old_uri = snap.file_id_to_url(src); | 640 | let old_uri = snap.file_id_to_url(src); |
641 | let new_uri = snap.anchored_path(anchor, &dst); | 641 | let new_uri = snap.anchored_path(&dst); |
642 | lsp_types::ResourceOp::Rename(lsp_types::RenameFile { | 642 | lsp_types::ResourceOp::Rename(lsp_types::RenameFile { |
643 | old_uri, | 643 | old_uri, |
644 | new_uri, | 644 | new_uri, |
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index a10b14778..52b7285dd 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs | |||
@@ -17,6 +17,14 @@ impl ast::Comment { | |||
17 | CommentKind::from_text(self.text()) | 17 | CommentKind::from_text(self.text()) |
18 | } | 18 | } |
19 | 19 | ||
20 | pub fn is_inner(&self) -> bool { | ||
21 | self.kind().doc == Some(CommentPlacement::Inner) | ||
22 | } | ||
23 | |||
24 | pub fn is_outer(&self) -> bool { | ||
25 | self.kind().doc == Some(CommentPlacement::Outer) | ||
26 | } | ||
27 | |||
20 | pub fn prefix(&self) -> &'static str { | 28 | pub fn prefix(&self) -> &'static str { |
21 | let &(prefix, _kind) = CommentKind::BY_PREFIX | 29 | let &(prefix, _kind) = CommentKind::BY_PREFIX |
22 | .iter() | 30 | .iter() |
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index a49be4602..05940a546 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs | |||
@@ -321,12 +321,11 @@ fn lines_match_works() { | |||
321 | /// as paths). You can use a `"{...}"` string literal as a wildcard for | 321 | /// as paths). You can use a `"{...}"` string literal as a wildcard for |
322 | /// arbitrary nested JSON. Arrays are sorted before comparison. | 322 | /// arbitrary nested JSON. Arrays are sorted before comparison. |
323 | pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { | 323 | pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> { |
324 | use serde_json::Value::*; | ||
325 | match (expected, actual) { | 324 | match (expected, actual) { |
326 | (&Number(ref l), &Number(ref r)) if l == r => None, | 325 | (Value::Number(l), Value::Number(r)) if l == r => None, |
327 | (&Bool(l), &Bool(r)) if l == r => None, | 326 | (Value::Bool(l), Value::Bool(r)) if l == r => None, |
328 | (&String(ref l), &String(ref r)) if lines_match(l, r) => None, | 327 | (Value::String(l), Value::String(r)) if lines_match(l, r) => None, |
329 | (&Array(ref l), &Array(ref r)) => { | 328 | (Value::Array(l), Value::Array(r)) => { |
330 | if l.len() != r.len() { | 329 | if l.len() != r.len() { |
331 | return Some((expected, actual)); | 330 | return Some((expected, actual)); |
332 | } | 331 | } |
@@ -350,17 +349,26 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a | |||
350 | None | 349 | None |
351 | } | 350 | } |
352 | } | 351 | } |
353 | (&Object(ref l), &Object(ref r)) => { | 352 | (Value::Object(l), Value::Object(r)) => { |
353 | fn sorted_values(obj: &serde_json::Map<String, Value>) -> Vec<&Value> { | ||
354 | let mut entries = obj.iter().collect::<Vec<_>>(); | ||
355 | entries.sort_by_key(|it| it.0); | ||
356 | entries.into_iter().map(|(_k, v)| v).collect::<Vec<_>>() | ||
357 | } | ||
358 | |||
354 | let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k)); | 359 | let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k)); |
355 | if !same_keys { | 360 | if !same_keys { |
356 | return Some((expected, actual)); | 361 | return Some((expected, actual)); |
357 | } | 362 | } |
358 | 363 | ||
359 | l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).next() | 364 | let l = sorted_values(l); |
365 | let r = sorted_values(r); | ||
366 | |||
367 | l.into_iter().zip(r).filter_map(|(l, r)| find_mismatch(l, r)).next() | ||
360 | } | 368 | } |
361 | (&Null, &Null) => None, | 369 | (Value::Null, Value::Null) => None, |
362 | // magic string literal "{...}" acts as wildcard for any sub-JSON | 370 | // magic string literal "{...}" acts as wildcard for any sub-JSON |
363 | (&String(ref l), _) if l == "{...}" => None, | 371 | (Value::String(l), _) if l == "{...}" => None, |
364 | _ => Some((expected, actual)), | 372 | _ => Some((expected, actual)), |
365 | } | 373 | } |
366 | } | 374 | } |
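
A small usage sketch of the refactored `find_mismatch` (hypothetical values; assumes this crate and `serde_json` are available): the `"{...}"` literal is a wildcard for arbitrary sub-JSON, object key order is irrelevant, and the first differing pair of values is reported.

```rust
use serde_json::json;
use test_utils::find_mismatch; // the function shown in the hunk above

fn main() {
    // "{...}" matches any nested JSON; object key order does not matter.
    let expected = json!({ "id": 1, "result": "{...}" });
    let actual = json!({ "result": { "capabilities": {} }, "id": 1 });
    assert!(find_mismatch(&expected, &actual).is_none());

    // On a mismatch, the first differing (expected, actual) pair is returned.
    let actual = json!({ "id": 2, "result": {} });
    assert_eq!(find_mismatch(&expected, &actual), Some((&json!(1), &json!(2))));
}
```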
diff --git a/crates/vfs/src/anchored_path.rs b/crates/vfs/src/anchored_path.rs new file mode 100644 index 000000000..02720a32e --- /dev/null +++ b/crates/vfs/src/anchored_path.rs | |||
@@ -0,0 +1,39 @@ | |||
1 | //! Analysis-level representation of file-system paths. | ||
2 | //! | ||
3 | //! The primary goal of this is to losslessly represent paths like | ||
4 | //! | ||
5 | //! ``` | ||
6 | //! #[path = "./bar.rs"] | ||
7 | //! mod foo; | ||
8 | //! ``` | ||
9 | //! | ||
10 | //! The first approach one might reach for is to use `PathBuf`. The problem here | ||
11 | //! is that `PathBuf` depends on the host target (Windows or Linux), but | ||
12 | //! rust-analyzer should be able to process `#[path = r"C:\bar.rs"]` on Unix. | ||
13 | //! | ||
14 | //! The second attempt is to use a `String`. This also fails, however. Consider a | ||
15 | //! hypothetical scenario where rust-analyzer operates in a | ||
16 | //! networked/distributed mode. There's one global instance of rust-analyzer, | ||
17 | //! which processes requests from different machines. Now, the semantics of | ||
18 | //! `#[path = "/abs/path.rs"]` actually depend on which file system we are on! | ||
19 | //! That is, even absolute paths exist relative to a file system! | ||
20 | //! | ||
21 | //! A more realistic scenario here is virtual VFS paths we use for testing. More | ||
22 | //! generally, there can be separate "universes" of VFS paths. | ||
23 | //! | ||
24 | //! That's why we use an anchored representation -- each path carries info | ||
25 | //! about the file it originates from. We can then fetch the fs/"universe" | ||
26 | //! information from the anchor. | ||
27 | use crate::FileId; | ||
28 | |||
29 | #[derive(Clone, PartialEq, Eq, Debug)] | ||
30 | pub struct AnchoredPathBuf { | ||
31 | pub anchor: FileId, | ||
32 | pub path: String, | ||
33 | } | ||
34 | |||
35 | #[derive(Clone, Copy, PartialEq, Eq, Debug)] | ||
36 | pub struct AnchoredPath<'a> { | ||
37 | pub anchor: FileId, | ||
38 | pub path: &'a str, | ||
39 | } | ||
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs index 9093fbd97..49ca593ac 100644 --- a/crates/vfs/src/file_set.rs +++ b/crates/vfs/src/file_set.rs | |||
@@ -7,7 +7,7 @@ use std::fmt; | |||
7 | use fst::{IntoStreamer, Streamer}; | 7 | use fst::{IntoStreamer, Streamer}; |
8 | use rustc_hash::FxHashMap; | 8 | use rustc_hash::FxHashMap; |
9 | 9 | ||
10 | use crate::{FileId, Vfs, VfsPath}; | 10 | use crate::{AnchoredPath, FileId, Vfs, VfsPath}; |
11 | 11 | ||
12 | #[derive(Default, Clone, Eq, PartialEq)] | 12 | #[derive(Default, Clone, Eq, PartialEq)] |
13 | pub struct FileSet { | 13 | pub struct FileSet { |
@@ -19,10 +19,10 @@ impl FileSet { | |||
19 | pub fn len(&self) -> usize { | 19 | pub fn len(&self) -> usize { |
20 | self.files.len() | 20 | self.files.len() |
21 | } | 21 | } |
22 | pub fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | 22 | pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> { |
23 | let mut base = self.paths[&anchor].clone(); | 23 | let mut base = self.paths[&path.anchor].clone(); |
24 | base.pop(); | 24 | base.pop(); |
25 | let path = base.join(path)?; | 25 | let path = base.join(path.path)?; |
26 | self.files.get(&path).copied() | 26 | self.files.get(&path).copied() |
27 | } | 27 | } |
28 | 28 | ||
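
A short sketch of how the anchored representation is used for path resolution (hypothetical file IDs and virtual paths; it assumes `FileSet::insert`, `FileId`'s public constructor, and `VfsPath::new_virtual_path` from the `vfs` crate):

```rust
use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};

fn main() {
    let mut file_set = FileSet::default();
    let lib_rs = FileId(0);
    let bar_rs = FileId(1);
    file_set.insert(lib_rs, VfsPath::new_virtual_path("/src/lib.rs".to_string()));
    file_set.insert(bar_rs, VfsPath::new_virtual_path("/src/bar.rs".to_string()));

    // `#[path = "bar.rs"] mod foo;` inside lib.rs: the path is resolved relative
    // to its anchor file, not relative to any host file system.
    let anchored = AnchoredPath { anchor: lib_rs, path: "bar.rs" };
    assert_eq!(file_set.resolve_path(anchored), Some(bar_rs));
}
```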
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index cdf6f1fd0..9cf2afd33 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs | |||
@@ -36,6 +36,7 @@ | |||
36 | //! have a single `FileSet` which unions the two sources. | 36 | //! have a single `FileSet` which unions the two sources. |
37 | mod vfs_path; | 37 | mod vfs_path; |
38 | mod path_interner; | 38 | mod path_interner; |
39 | mod anchored_path; | ||
39 | pub mod file_set; | 40 | pub mod file_set; |
40 | pub mod loader; | 41 | pub mod loader; |
41 | 42 | ||
@@ -43,7 +44,10 @@ use std::{fmt, mem}; | |||
43 | 44 | ||
44 | use crate::path_interner::PathInterner; | 45 | use crate::path_interner::PathInterner; |
45 | 46 | ||
46 | pub use crate::vfs_path::VfsPath; | 47 | pub use crate::{ |
48 | anchored_path::{AnchoredPath, AnchoredPathBuf}, | ||
49 | vfs_path::VfsPath, | ||
50 | }; | ||
47 | pub use paths::{AbsPath, AbsPathBuf}; | 51 | pub use paths::{AbsPath, AbsPathBuf}; |
48 | 52 | ||
49 | #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] | 53 | #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] |
@@ -99,18 +103,19 @@ impl Vfs { | |||
99 | (file_id, path) | 103 | (file_id, path) |
100 | }) | 104 | }) |
101 | } | 105 | } |
102 | pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) { | 106 | pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool { |
103 | let file_id = self.alloc_file_id(path); | 107 | let file_id = self.alloc_file_id(path); |
104 | let change_kind = match (&self.get(file_id), &contents) { | 108 | let change_kind = match (&self.get(file_id), &contents) { |
105 | (None, None) => return, | 109 | (None, None) => return false, |
106 | (None, Some(_)) => ChangeKind::Create, | 110 | (None, Some(_)) => ChangeKind::Create, |
107 | (Some(_), None) => ChangeKind::Delete, | 111 | (Some(_), None) => ChangeKind::Delete, |
108 | (Some(old), Some(new)) if old == new => return, | 112 | (Some(old), Some(new)) if old == new => return false, |
109 | (Some(_), Some(_)) => ChangeKind::Modify, | 113 | (Some(_), Some(_)) => ChangeKind::Modify, |
110 | }; | 114 | }; |
111 | 115 | ||
112 | *self.get_mut(file_id) = contents; | 116 | *self.get_mut(file_id) = contents; |
113 | self.changes.push(ChangedFile { file_id, change_kind }) | 117 | self.changes.push(ChangedFile { file_id, change_kind }); |
118 | true | ||
114 | } | 119 | } |
115 | pub fn has_changes(&self) -> bool { | 120 | pub fn has_changes(&self) -> bool { |
116 | !self.changes.is_empty() | 121 | !self.changes.is_empty() |
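
The new `bool` return value of `set_file_contents` is what lets the `DidOpenTextDocument` handler above tell a real change apart from a no-op write. A minimal sketch (assuming `Vfs` implements `Default`, as derived in the crate):

```rust
use vfs::{Vfs, VfsPath};

fn main() {
    let mut vfs = Vfs::default();
    let path = VfsPath::new_virtual_path("/main.rs".to_string());

    // Creating the file is a change...
    assert!(vfs.set_file_contents(path.clone(), Some(b"fn main() {}".to_vec())));
    // ...while writing identical contents is a no-op and now reports `false`.
    assert!(!vfs.set_file_contents(path, Some(b"fn main() {}".to_vec())));
    assert!(vfs.has_changes());
}
```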
diff --git a/docs/dev/README.md b/docs/dev/README.md index ca324493f..2795f6b5c 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -170,7 +170,7 @@ In general, API is centered around UI concerns -- the result of the call is what | |||
170 | The results are 100% Rust specific though. | 170 | The results are 100% Rust specific though. |
171 | Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. | 171 | Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at. |
172 | 172 | ||
173 | ## LSP is sateless | 173 | ## LSP is stateless |
174 | 174 | ||
175 | The protocol is implemented in the mostly stateless way. | 175 | The protocol is implemented in the mostly stateless way. |
176 | A good mental model is HTTP, which doesn't store per-client state, and instead relies on devices like cookies to maintain an illusion of state. | 176 | A good mental model is HTTP, which doesn't store per-client state, and instead relies on devices like cookies to maintain an illusion of state. |
diff --git a/docs/dev/style.md b/docs/dev/style.md index 1a952197f..c8d943142 100644 --- a/docs/dev/style.md +++ b/docs/dev/style.md | |||
@@ -514,6 +514,7 @@ impl Parent { | |||
514 | Use boring and long names for local variables ([yay code completion](https://github.com/rust-analyzer/rust-analyzer/pull/4162#discussion_r417130973)). | 514 | Use boring and long names for local variables ([yay code completion](https://github.com/rust-analyzer/rust-analyzer/pull/4162#discussion_r417130973)). |
515 | The default name is a lowercased name of the type: `global_state: GlobalState`. | 515 | The default name is a lowercased name of the type: `global_state: GlobalState`. |
516 | Avoid ad-hoc acronyms and contractions, but use the ones that exist consistently (`db`, `ctx`, `acc`). | 516 | Avoid ad-hoc acronyms and contractions, but use the ones that exist consistently (`db`, `ctx`, `acc`). |
517 | Prefer American spelling (color, behavior). | ||
517 | 518 | ||
518 | Default names: | 519 | Default names: |
519 | 520 | ||
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc new file mode 100644 index 000000000..cb2ae6fc1 --- /dev/null +++ b/docs/user/generated_config.adoc | |||
@@ -0,0 +1,106 @@ | |||
1 | rust-analyzer.assist.importMergeBehaviour (default: `"full"`):: | ||
2 | The strategy to use when inserting new imports or merging imports. | ||
3 | rust-analyzer.assist.importPrefix (default: `"plain"`):: | ||
4 | The path structure for newly inserted paths to use. | ||
5 | rust-analyzer.callInfo.full (default: `true`):: | ||
6 | Show function name and docs in parameter hints. | ||
7 | rust-analyzer.cargo.autoreload (default: `true`):: | ||
8 | Automatically refresh project info via `cargo metadata` on Cargo.toml changes. | ||
9 | rust-analyzer.cargo.allFeatures (default: `false`):: | ||
10 | Activate all available features. | ||
11 | rust-analyzer.cargo.features (default: `[]`):: | ||
12 | List of features to activate. | ||
13 | rust-analyzer.cargo.loadOutDirsFromCheck (default: `false`):: | ||
14 | Run `cargo check` on startup to get the correct value for package OUT_DIRs. | ||
15 | rust-analyzer.cargo.noDefaultFeatures (default: `false`):: | ||
16 | Do not activate the `default` feature. | ||
17 | rust-analyzer.cargo.target (default: `null`):: | ||
18 | Compilation target (target triple). | ||
19 | rust-analyzer.cargo.noSysroot (default: `false`):: | ||
20 | Internal config for debugging, disables loading of sysroot crates. | ||
21 | rust-analyzer.checkOnSave.enable (default: `true`):: | ||
22 | Run specified `cargo check` command for diagnostics on save. | ||
23 | rust-analyzer.checkOnSave.allFeatures (default: `null`):: | ||
24 | Check with all features (will be passed as `--all-features`). Defaults to `rust-analyzer.cargo.allFeatures`. | ||
25 | rust-analyzer.checkOnSave.allTargets (default: `true`):: | ||
26 | Check all targets and tests (will be passed as `--all-targets`). | ||
27 | rust-analyzer.checkOnSave.command (default: `"check"`):: | ||
28 | Cargo command to use for `cargo check`. | ||
29 | rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`):: | ||
30 | Do not activate the `default` feature. | ||
31 | rust-analyzer.checkOnSave.target (default: `null`):: | ||
32 | Check for a specific target. Defaults to `rust-analyzer.cargo.target`. | ||
33 | rust-analyzer.checkOnSave.extraArgs (default: `[]`):: | ||
34 | Extra arguments for `cargo check`. | ||
35 | rust-analyzer.checkOnSave.features (default: `null`):: | ||
36 | List of features to activate. Defaults to `rust-analyzer.cargo.features`. | ||
37 | rust-analyzer.checkOnSave.overrideCommand (default: `null`):: | ||
38 | Advanced option, fully override the command rust-analyzer uses for checking. The command should include `--message-format=json` or similar option. | ||
39 | rust-analyzer.completion.addCallArgumentSnippets (default: `true`):: | ||
40 | Whether to add argument snippets when completing functions. | ||
41 | rust-analyzer.completion.addCallParenthesis (default: `true`):: | ||
42 | Whether to add parentheses when completing functions. | ||
43 | rust-analyzer.completion.postfix.enable (default: `true`):: | ||
44 | Whether to show postfix snippets like `dbg`, `if`, `not`, etc. | ||
45 | rust-analyzer.completion.autoimport.enable (default: `true`):: | ||
46 | Toggles the additional completions that automatically add imports when completed. Note that your client has to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. | ||
47 | rust-analyzer.diagnostics.enable (default: `true`):: | ||
48 | Whether to show native rust-analyzer diagnostics. | ||
49 | rust-analyzer.diagnostics.enableExperimental (default: `true`):: | ||
50 | Whether to show experimental rust-analyzer diagnostics that might have more false positives than usual. | ||
51 | rust-analyzer.diagnostics.disabled (default: `[]`):: | ||
52 | List of rust-analyzer diagnostics to disable. | ||
53 | rust-analyzer.diagnostics.warningsAsHint (default: `[]`):: | ||
54 | List of warnings that should be displayed with hint severity.\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel. | ||
55 | rust-analyzer.diagnostics.warningsAsInfo (default: `[]`):: | ||
56 | List of warnings that should be displayed with info severity.\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the problems panel. | ||
57 | rust-analyzer.files.watcher (default: `"client"`):: | ||
58 | Controls file watching implementation. | ||
59 | rust-analyzer.hoverActions.debug (default: `true`):: | ||
60 | Whether to show `Debug` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set. | ||
61 | rust-analyzer.hoverActions.enable (default: `true`):: | ||
62 | Whether to show HoverActions in Rust files. | ||
63 | rust-analyzer.hoverActions.gotoTypeDef (default: `true`):: | ||
64 | Whether to show `Go to Type Definition` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set. | ||
65 | rust-analyzer.hoverActions.implementations (default: `true`):: | ||
66 | Whether to show `Implementations` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set. | ||
67 | rust-analyzer.hoverActions.run (default: `true`):: | ||
68 | Whether to show `Run` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set. | ||
69 | rust-analyzer.hoverActions.linksInHover (default: `true`):: | ||
70 | Use markdown syntax for links in hover. | ||
71 | rust-analyzer.inlayHints.chainingHints (default: `true`):: | ||
72 | Whether to show inlay type hints for method chains. | ||
73 | rust-analyzer.inlayHints.maxLength (default: `null`):: | ||
74 | Maximum length for inlay hints. | ||
75 | rust-analyzer.inlayHints.parameterHints (default: `true`):: | ||
76 | Whether to show function parameter name inlay hints at the call site. | ||
77 | rust-analyzer.inlayHints.typeHints (default: `true`):: | ||
78 | Whether to show inlay type hints for variables. | ||
79 | rust-analyzer.lens.debug (default: `true`):: | ||
80 | Whether to show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set. | ||
81 | rust-analyzer.lens.enable (default: `true`):: | ||
82 | Whether to show CodeLens in Rust files. | ||
83 | rust-analyzer.lens.implementations (default: `true`):: | ||
84 | Whether to show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set. | ||
85 | rust-analyzer.lens.run (default: `true`):: | ||
86 | Whether to show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set. | ||
87 | rust-analyzer.lens.methodReferences (default: `false`):: | ||
88 | Whether to show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set. | ||
89 | rust-analyzer.linkedProjects (default: `[]`):: | ||
90 | Disable project auto-discovery in favor of an explicitly specified set of projects. \nElements must be paths pointing to Cargo.toml, rust-project.json, or JSON objects in rust-project.json format. | ||
91 | rust-analyzer.lruCapacity (default: `null`):: | ||
92 | Number of syntax trees rust-analyzer keeps in memory. | ||
93 | rust-analyzer.notifications.cargoTomlNotFound (default: `true`):: | ||
94 | Whether to show `can't find Cargo.toml` error message. | ||
95 | rust-analyzer.procMacro.enable (default: `false`):: | ||
96 | Enable proc macro support; `cargo.loadOutDirsFromCheck` must be enabled. | ||
97 | rust-analyzer.runnables.overrideCargo (default: `null`):: | ||
98 | Command to be executed instead of 'cargo' for runnables. | ||
99 | rust-analyzer.runnables.cargoExtraArgs (default: `[]`):: | ||
100 | Additional arguments to be passed to cargo for runnables such as tests or binaries.\nFor example, it may be '--release'. | ||
101 | rust-analyzer.rustcSource (default: `null`):: | ||
102 | Path to the rust compiler sources, for usage in rustc_private projects. | ||
103 | rust-analyzer.rustfmt.extraArgs (default: `[]`):: | ||
104 | Additional arguments to rustfmt. | ||
105 | rust-analyzer.rustfmt.overrideCommand (default: `null`):: | ||
106 | Advanced option, fully override the command rust-analyzer uses for formatting. | ||
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 575e5866f..d85901686 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc | |||
@@ -263,6 +263,15 @@ If you get an error saying `No such file or directory: 'rust-analyzer'`, see the | |||
263 | 263 | ||
264 | GNOME Builder 3.37.1 and newer has native `rust-analyzer` support. If the LSP binary is not available, GNOME Builder can install it when opening a Rust file. | 264 | GNOME Builder 3.37.1 and newer has native `rust-analyzer` support. If the LSP binary is not available, GNOME Builder can install it when opening a Rust file. |
265 | 265 | ||
266 | == Configuration | ||
267 | |||
268 | rust-analyzer is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files. | ||
269 | Please consult your editor's documentation to learn how to configure LSP servers. | ||
270 | |||
271 | This is the list of config options rust-analyzer supports: | ||
272 | |||
273 | include::./generated_config.adoc[] | ||
274 | |||
266 | == Non-Cargo Based Projects | 275 | == Non-Cargo Based Projects |
267 | 276 | ||
268 | rust-analyzer does not require Cargo. | 277 | rust-analyzer does not require Cargo. |
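
As a rough illustration of what the server ends up receiving: the dotted option names listed above become nested JSON object keys, and that nested object is what `Config::update` is fed. A hypothetical payload, sketched with `serde_json` (how your editor transports it -- initialization options or `workspace/configuration` -- is editor-specific):

```rust
fn main() {
    // Hypothetical settings: `rust-analyzer.cargo.loadOutDirsFromCheck`,
    // `rust-analyzer.checkOnSave.command` and `rust-analyzer.assist.importMergeBehaviour`
    // expressed as the nested JSON object the server reads.
    let settings = serde_json::json!({
        "cargo": { "loadOutDirsFromCheck": true },
        "checkOnSave": { "command": "clippy" },
        "assist": { "importMergeBehaviour": "last" }
    });
    println!("{:#}", settings);
}
```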
diff --git a/editors/code/package.json b/editors/code/package.json index dbde37005..ca5f2ebc8 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -215,169 +215,6 @@ | |||
215 | "type": "object", | 215 | "type": "object", |
216 | "title": "Rust Analyzer", | 216 | "title": "Rust Analyzer", |
217 | "properties": { | 217 | "properties": { |
218 | "rust-analyzer.lruCapacity": { | ||
219 | "type": [ | ||
220 | "null", | ||
221 | "integer" | ||
222 | ], | ||
223 | "default": null, | ||
224 | "minimum": 0, | ||
225 | "exclusiveMinimum": true, | ||
226 | "description": "Number of syntax trees rust-analyzer keeps in memory." | ||
227 | }, | ||
228 | "rust-analyzer.files.watcher": { | ||
229 | "type": "string", | ||
230 | "enum": [ | ||
231 | "client", | ||
232 | "notify" | ||
233 | ], | ||
234 | "default": "client", | ||
235 | "description": "Controls file watching implementation." | ||
236 | }, | ||
237 | "rust-analyzer.files.exclude": { | ||
238 | "type": "array", | ||
239 | "items": { | ||
240 | "type": "string" | ||
241 | }, | ||
242 | "default": [], | ||
243 | "description": "Paths to exclude from analysis." | ||
244 | }, | ||
245 | "rust-analyzer.notifications.cargoTomlNotFound": { | ||
246 | "type": "boolean", | ||
247 | "default": true, | ||
248 | "markdownDescription": "Whether to show `can't find Cargo.toml` error message" | ||
249 | }, | ||
250 | "rust-analyzer.cargo.autoreload": { | ||
251 | "type": "boolean", | ||
252 | "default": true, | ||
253 | "markdownDescription": "Automatically refresh project info via `cargo metadata` on Cargo.toml changes" | ||
254 | }, | ||
255 | "rust-analyzer.cargo.noDefaultFeatures": { | ||
256 | "type": "boolean", | ||
257 | "default": false, | ||
258 | "markdownDescription": "Do not activate the `default` feature" | ||
259 | }, | ||
260 | "rust-analyzer.cargo.allFeatures": { | ||
261 | "type": "boolean", | ||
262 | "default": false, | ||
263 | "description": "Activate all available features" | ||
264 | }, | ||
265 | "rust-analyzer.cargo.features": { | ||
266 | "type": "array", | ||
267 | "items": { | ||
268 | "type": "string" | ||
269 | }, | ||
270 | "default": [], | ||
271 | "description": "List of features to activate" | ||
272 | }, | ||
273 | "rust-analyzer.cargo.loadOutDirsFromCheck": { | ||
274 | "type": "boolean", | ||
275 | "default": false, | ||
276 | "markdownDescription": "Run `cargo check` on startup to get the correct value for package OUT_DIRs" | ||
277 | }, | ||
278 | "rust-analyzer.cargo.target": { | ||
279 | "type": [ | ||
280 | "null", | ||
281 | "string" | ||
282 | ], | ||
283 | "default": null, | ||
284 | "description": "Specify the compilation target" | ||
285 | }, | ||
286 | "rust-analyzer.noSysroot": { | ||
287 | "markdownDescription": "Internal config for debugging, disables loading of sysroot crates", | ||
288 | "type": "boolean", | ||
289 | "default": false | ||
290 | }, | ||
291 | "rust-analyzer.rustfmt.extraArgs": { | ||
292 | "type": "array", | ||
293 | "items": { | ||
294 | "type": "string" | ||
295 | }, | ||
296 | "default": [], | ||
297 | "description": "Additional arguments to rustfmt" | ||
298 | }, | ||
299 | "rust-analyzer.rustfmt.overrideCommand": { | ||
300 | "type": [ | ||
301 | "null", | ||
302 | "array" | ||
303 | ], | ||
304 | "items": { | ||
305 | "type": "string", | ||
306 | "minItems": 1 | ||
307 | }, | ||
308 | "default": null, | ||
309 | "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for formatting." | ||
310 | }, | ||
311 | "rust-analyzer.checkOnSave.enable": { | ||
312 | "type": "boolean", | ||
313 | "default": true, | ||
314 | "markdownDescription": "Run specified `cargo check` command for diagnostics on save" | ||
315 | }, | ||
316 | "rust-analyzer.checkOnSave.extraArgs": { | ||
317 | "type": "array", | ||
318 | "items": { | ||
319 | "type": "string" | ||
320 | }, | ||
321 | "markdownDescription": "Extra arguments for `cargo check`", | ||
322 | "default": [] | ||
323 | }, | ||
324 | "rust-analyzer.checkOnSave.command": { | ||
325 | "type": "string", | ||
326 | "default": "check", | ||
327 | "markdownDescription": "Cargo command to use for `cargo check`" | ||
328 | }, | ||
329 | "rust-analyzer.checkOnSave.overrideCommand": { | ||
330 | "type": [ | ||
331 | "null", | ||
332 | "array" | ||
333 | ], | ||
334 | "items": { | ||
335 | "type": "string", | ||
336 | "minItems": 1 | ||
337 | }, | ||
338 | "default": null, | ||
339 | "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for checking. The command should include `--message-format=json` or similar option." | ||
340 | }, | ||
341 | "rust-analyzer.checkOnSave.allTargets": { | ||
342 | "type": "boolean", | ||
343 | "default": true, | ||
344 | "markdownDescription": "Check all targets and tests (will be passed as `--all-targets`)" | ||
345 | }, | ||
346 | "rust-analyzer.checkOnSave.noDefaultFeatures": { | ||
347 | "type": [ | ||
348 | "null", | ||
349 | "boolean" | ||
350 | ], | ||
351 | "default": null, | ||
352 | "markdownDescription": "Do not activate the `default` feature" | ||
353 | }, | ||
354 | "rust-analyzer.checkOnSave.allFeatures": { | ||
355 | "type": [ | ||
356 | "null", | ||
357 | "boolean" | ||
358 | ], | ||
359 | "default": null, | ||
360 | "markdownDescription": "Check with all features (will be passed as `--all-features`). Defaults to `rust-analyzer.cargo.allFeatures`." | ||
361 | }, | ||
362 | "rust-analyzer.checkOnSave.features": { | ||
363 | "type": [ | ||
364 | "null", | ||
365 | "array" | ||
366 | ], | ||
367 | "items": { | ||
368 | "type": "string" | ||
369 | }, | ||
370 | "default": null, | ||
371 | "description": "List of features to activate. Defaults to `rust-analyzer.cargo.features`." | ||
372 | }, | ||
373 | "rust-analyzer.checkOnSave.target": { | ||
374 | "type": [ | ||
375 | "null", | ||
376 | "string" | ||
377 | ], | ||
378 | "default": null, | ||
379 | "description": "Check for a specific target. Defaults to `rust-analyzer.cargo.target`." | ||
380 | }, | ||
381 | "rust-analyzer.cargoRunner": { | 218 | "rust-analyzer.cargoRunner": { |
382 | "type": [ | 219 | "type": [ |
383 | "null", | 220 | "null", |
@@ -420,59 +257,6 @@ | |||
420 | "default": true, | 257 | "default": true, |
421 | "description": "Whether to show inlay hints" | 258 | "description": "Whether to show inlay hints" |
422 | }, | 259 | }, |
423 | "rust-analyzer.inlayHints.typeHints": { | ||
424 | "type": "boolean", | ||
425 | "default": true, | ||
426 | "description": "Whether to show inlay type hints for variables." | ||
427 | }, | ||
428 | "rust-analyzer.inlayHints.chainingHints": { | ||
429 | "type": "boolean", | ||
430 | "default": true, | ||
431 | "description": "Whether to show inlay type hints for method chains." | ||
432 | }, | ||
433 | "rust-analyzer.inlayHints.parameterHints": { | ||
434 | "type": "boolean", | ||
435 | "default": true, | ||
436 | "description": "Whether to show function parameter name inlay hints at the call site." | ||
437 | }, | ||
438 | "rust-analyzer.inlayHints.maxLength": { | ||
439 | "type": [ | ||
440 | "null", | ||
441 | "integer" | ||
442 | ], | ||
443 | "default": 20, | ||
444 | "minimum": 0, | ||
445 | "exclusiveMinimum": true, | ||
446 | "description": "Maximum length for inlay hints" | ||
447 | }, | ||
448 | "rust-analyzer.completion.addCallParenthesis": { | ||
449 | "type": "boolean", | ||
450 | "default": true, | ||
451 | "description": "Whether to add parenthesis when completing functions" | ||
452 | }, | ||
453 | "rust-analyzer.completion.addCallArgumentSnippets": { | ||
454 | "type": "boolean", | ||
455 | "default": true, | ||
456 | "description": "Whether to add argument snippets when completing functions" | ||
457 | }, | ||
458 | "rust-analyzer.completion.postfix.enable": { | ||
459 | "type": "boolean", | ||
460 | "default": true, | ||
461 | "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc." | ||
462 | }, | ||
463 | "rust-analyzer.completion.autoimport.enable": { | ||
464 | "type": "boolean", | ||
465 | "default": true, | ||
466 | "markdownDescription": [ | ||
467 | "Toggles the additional completions that automatically add imports when completed.", | ||
468 | "Note that your client have to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled" | ||
469 | ] | ||
470 | }, | ||
471 | "rust-analyzer.callInfo.full": { | ||
472 | "type": "boolean", | ||
473 | "default": true, | ||
474 | "description": "Show function name and docs in parameter hints" | ||
475 | }, | ||
476 | "rust-analyzer.updates.channel": { | 260 | "rust-analyzer.updates.channel": { |
477 | "type": "string", | 261 | "type": "string", |
478 | "enum": [ | 262 | "enum": [ |
@@ -520,11 +304,6 @@ | |||
520 | "type": "boolean", | 304 | "type": "boolean", |
521 | "default": false | 305 | "default": false |
522 | }, | 306 | }, |
523 | "rust-analyzer.procMacro.enable": { | ||
524 | "description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", | ||
525 | "type": "boolean", | ||
526 | "default": false | ||
527 | }, | ||
528 | "rust-analyzer.debug.engine": { | 307 | "rust-analyzer.debug.engine": { |
529 | "type": "string", | 308 | "type": "string", |
530 | "enum": [ | 309 | "enum": [ |
@@ -557,157 +336,362 @@ | |||
557 | "default": {}, | 336 | "default": {}, |
558 | "description": "Optional settings passed to the debug engine. Example:\n{ \"lldb\": { \"terminal\":\"external\"} }" | 337 | "description": "Optional settings passed to the debug engine. Example:\n{ \"lldb\": { \"terminal\":\"external\"} }" |
559 | }, | 338 | }, |
560 | "rust-analyzer.lens.enable": { | 339 | "rust-analyzer.assist.importMergeBehaviour": { |
561 | "description": "Whether to show CodeLens in Rust files.", | 340 | "markdownDescription": "The strategy to use when inserting new imports or merging imports.", |
562 | "type": "boolean", | 341 | "default": "full", |
563 | "default": true | 342 | "type": "string", |
343 | "enum": [ | ||
344 | "none", | ||
345 | "full", | ||
346 | "last" | ||
347 | ], | ||
348 | "enumDescriptions": [ | ||
349 | "No merging", | ||
350 | "Merge all layers of the import trees", | ||
351 | "Only merge the last layer of the import trees" | ||
352 | ] | ||
564 | }, | 353 | }, |
565 | "rust-analyzer.lens.run": { | 354 | "rust-analyzer.assist.importPrefix": { |
566 | "markdownDescription": "Whether to show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | 355 | "markdownDescription": "The path structure for newly inserted paths to use.", |
567 | "type": "boolean", | 356 | "default": "plain", |
568 | "default": true | 357 | "type": "string", |
358 | "enum": [ | ||
359 | "plain", | ||
360 | "by_self", | ||
361 | "by_crate" | ||
362 | ], | ||
363 | "enumDescriptions": [ | ||
364 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", | ||
365 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name", | ||
366 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." | ||
367 | ] | ||
569 | }, | 368 | }, |
570 | "rust-analyzer.lens.debug": { | 369 | "rust-analyzer.callInfo.full": { |
571 | "markdownDescription": "Whether to show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | 370 | "markdownDescription": "Show function name and docs in parameter hints.", |
572 | "type": "boolean", | 371 | "default": true, |
573 | "default": true | 372 | "type": "boolean" |
574 | }, | 373 | }, |
575 | "rust-analyzer.lens.implementations": { | 374 | "rust-analyzer.cargo.autoreload": { |
576 | "markdownDescription": "Whether to show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | 375 | "markdownDescription": "Automatically refresh project info via `cargo metadata` on Cargo.toml changes.", |
577 | "type": "boolean", | 376 | "default": true, |
578 | "default": true | 377 | "type": "boolean" |
579 | }, | 378 | }, |
580 | "rust-analyzer.lens.methodReferences": { | 379 | "rust-analyzer.cargo.allFeatures": { |
581 | "markdownDescription": "Whether to show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | 380 | "markdownDescription": "Activate all available features.", |
582 | "type": "boolean", | 381 | "default": false, |
583 | "default": false | 382 | "type": "boolean" |
584 | }, | 383 | }, |
585 | "rust-analyzer.hoverActions.enable": { | 384 | "rust-analyzer.cargo.features": { |
586 | "description": "Whether to show HoverActions in Rust files.", | 385 | "markdownDescription": "List of features to activate.", |
587 | "type": "boolean", | 386 | "default": [], |
588 | "default": true | 387 | "type": "array", |
388 | "items": { | ||
389 | "type": "string" | ||
390 | } | ||
589 | }, | 391 | }, |
590 | "rust-analyzer.hoverActions.implementations": { | 392 | "rust-analyzer.cargo.loadOutDirsFromCheck": { |
591 | "markdownDescription": "Whether to show `Implementations` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | 393 | "markdownDescription": "Run `cargo check` on startup to get the correct value for package OUT_DIRs.", |
592 | "type": "boolean", | 394 | "default": false, |
593 | "default": true | 395 | "type": "boolean" |
594 | }, | 396 | }, |
595 | "rust-analyzer.hoverActions.run": { | 397 | "rust-analyzer.cargo.noDefaultFeatures": { |
596 | "markdownDescription": "Whether to show `Run` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | 398 | "markdownDescription": "Do not activate the `default` feature.", |
597 | "type": "boolean", | 399 | "default": false, |
598 | "default": true | 400 | "type": "boolean" |
599 | }, | 401 | }, |
600 | "rust-analyzer.hoverActions.debug": { | 402 | "rust-analyzer.cargo.target": { |
601 | "markdownDescription": "Whether to show `Debug` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | 403 | "markdownDescription": "Compilation target (target triple).", |
602 | "type": "boolean", | 404 | "default": null, |
603 | "default": true | 405 | "type": [ |
406 | "null", | ||
407 | "string" | ||
408 | ] | ||
604 | }, | 409 | }, |
605 | "rust-analyzer.hoverActions.gotoTypeDef": { | 410 | "rust-analyzer.cargo.noSysroot": { |
606 | "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | 411 | "markdownDescription": "Internal config for debugging, disables loading of sysroot crates.", |
607 | "type": "boolean", | 412 | "default": false, |
608 | "default": true | 413 | "type": "boolean" |
609 | }, | 414 | }, |
610 | "rust-analyzer.linkedProjects": { | 415 | "rust-analyzer.checkOnSave.enable": { |
611 | "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set of projects. \nElements must be paths pointing to Cargo.toml, rust-project.json, or JSON objects in rust-project.json format", | 416 | "markdownDescription": "Run specified `cargo check` command for diagnostics on save.", |
417 | "default": true, | ||
418 | "type": "boolean" | ||
419 | }, | ||
420 | "rust-analyzer.checkOnSave.allFeatures": { | ||
421 | "markdownDescription": "Check with all features (will be passed as `--all-features`). Defaults to `rust-analyzer.cargo.allFeatures`.", | ||
422 | "default": null, | ||
423 | "type": [ | ||
424 | "null", | ||
425 | "boolean" | ||
426 | ] | ||
427 | }, | ||
428 | "rust-analyzer.checkOnSave.allTargets": { | ||
429 | "markdownDescription": "Check all targets and tests (will be passed as `--all-targets`).", | ||
430 | "default": true, | ||
431 | "type": "boolean" | ||
432 | }, | ||
433 | "rust-analyzer.checkOnSave.command": { | ||
434 | "markdownDescription": "Cargo command to use for `cargo check`.", | ||
435 | "default": "check", | ||
436 | "type": "string" | ||
437 | }, | ||
438 | "rust-analyzer.checkOnSave.noDefaultFeatures": { | ||
439 | "markdownDescription": "Do not activate the `default` feature.", | ||
440 | "default": null, | ||
441 | "type": [ | ||
442 | "null", | ||
443 | "boolean" | ||
444 | ] | ||
445 | }, | ||
446 | "rust-analyzer.checkOnSave.target": { | ||
447 | "markdownDescription": "Check for a specific target. Defaults to `rust-analyzer.cargo.target`.", | ||
448 | "default": null, | ||
449 | "type": [ | ||
450 | "null", | ||
451 | "string" | ||
452 | ] | ||
453 | }, | ||
454 | "rust-analyzer.checkOnSave.extraArgs": { | ||
455 | "markdownDescription": "Extra arguments for `cargo check`.", | ||
456 | "default": [], | ||
612 | "type": "array", | 457 | "type": "array", |
613 | "items": { | 458 | "items": { |
614 | "type": [ | 459 | "type": "string" |
615 | "string", | 460 | } |
616 | "object" | 461 | }, |
617 | ] | 462 | "rust-analyzer.checkOnSave.features": { |
618 | }, | 463 | "markdownDescription": "List of features to activate. Defaults to `rust-analyzer.cargo.features`.", |
619 | "default": null | 464 | "default": null, |
465 | "type": [ | ||
466 | "null", | ||
467 | "array" | ||
468 | ], | ||
469 | "items": { | ||
470 | "type": "string" | ||
471 | } | ||
472 | }, | ||
473 | "rust-analyzer.checkOnSave.overrideCommand": { | ||
474 | "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for checking. The command should include `--message-format=json` or similar option.", | ||
475 | "default": null, | ||
476 | "type": [ | ||
477 | "null", | ||
478 | "array" | ||
479 | ], | ||
480 | "items": { | ||
481 | "type": "string" | ||
482 | } | ||
483 | }, | ||
484 | "rust-analyzer.completion.addCallArgumentSnippets": { | ||
485 | "markdownDescription": "Whether to add argument snippets when completing functions.", | ||
486 | "default": true, | ||
487 | "type": "boolean" | ||
488 | }, | ||
489 | "rust-analyzer.completion.addCallParenthesis": { | ||
490 | "markdownDescription": "Whether to add parenthesis when completing functions.", | ||
491 | "default": true, | ||
492 | "type": "boolean" | ||
493 | }, | ||
494 | "rust-analyzer.completion.postfix.enable": { | ||
495 | "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.", | ||
496 | "default": true, | ||
497 | "type": "boolean" | ||
498 | }, | ||
499 | "rust-analyzer.completion.autoimport.enable": { | ||
500 | "markdownDescription": "Toggles the additional completions that automatically add imports when completed. Note that your client have to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", | ||
501 | "default": true, | ||
502 | "type": "boolean" | ||
620 | }, | 503 | }, |
621 | "rust-analyzer.diagnostics.enable": { | 504 | "rust-analyzer.diagnostics.enable": { |
622 | "type": "boolean", | 505 | "markdownDescription": "Whether to show native rust-analyzer diagnostics.", |
623 | "default": true, | 506 | "default": true, |
624 | "markdownDescription": "Whether to show native rust-analyzer diagnostics." | 507 | "type": "boolean" |
625 | }, | 508 | }, |
626 | "rust-analyzer.diagnostics.enableExperimental": { | 509 | "rust-analyzer.diagnostics.enableExperimental": { |
627 | "type": "boolean", | 510 | "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might have more false positives than usual.", |
628 | "default": true, | 511 | "default": true, |
629 | "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might have more false positives than usual." | 512 | "type": "boolean" |
630 | }, | 513 | }, |
631 | "rust-analyzer.diagnostics.disabled": { | 514 | "rust-analyzer.diagnostics.disabled": { |
515 | "markdownDescription": "List of rust-analyzer diagnostics to disable.", | ||
516 | "default": [], | ||
632 | "type": "array", | 517 | "type": "array", |
633 | "uniqueItems": true, | ||
634 | "items": { | 518 | "items": { |
635 | "type": "string" | 519 | "type": "string" |
636 | }, | 520 | }, |
637 | "description": "List of rust-analyzer diagnostics to disable", | 521 | "uniqueItems": true |
638 | "default": [] | ||
639 | }, | 522 | }, |
640 | "rust-analyzer.diagnostics.warningsAsInfo": { | 523 | "rust-analyzer.diagnostics.warningsAsHint": { |
524 | "markdownDescription": "List of warnings that should be displayed with info severity.\\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the problems panel.", | ||
525 | "default": [], | ||
641 | "type": "array", | 526 | "type": "array", |
642 | "uniqueItems": true, | ||
643 | "items": { | 527 | "items": { |
644 | "type": "string" | 528 | "type": "string" |
645 | }, | 529 | } |
646 | "description": "List of warnings that should be displayed with info severity.\nThe warnings will be indicated by a blue squiggly underline in code and a blue icon in the problems panel.", | ||
647 | "default": [] | ||
648 | }, | 530 | }, |
649 | "rust-analyzer.diagnostics.warningsAsHint": { | 531 | "rust-analyzer.diagnostics.warningsAsInfo": { |
532 | "markdownDescription": "List of warnings that should be displayed with hint severity.\\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel.", | ||
533 | "default": [], | ||
650 | "type": "array", | 534 | "type": "array", |
651 | "uniqueItems": true, | ||
652 | "items": { | 535 | "items": { |
653 | "type": "string" | 536 | "type": "string" |
654 | }, | 537 | } |
655 | "description": "List of warnings that should be displayed with hint severity.\nThe warnings will be indicated by faded text or three dots in code and will not show up in the problems panel.", | ||
656 | "default": [] | ||
657 | }, | 538 | }, |
658 | "rust-analyzer.assist.importMergeBehaviour": { | 539 | "rust-analyzer.files.watcher": { |
659 | "type": "string", | 540 | "markdownDescription": "Controls file watching implementation.", |
660 | "enum": [ | 541 | "default": "client", |
661 | "none", | 542 | "type": "string" |
662 | "full", | ||
663 | "last" | ||
664 | ], | ||
665 | "enumDescriptions": [ | ||
666 | "No merging", | ||
667 | "Merge all layers of the import trees", | ||
668 | "Only merge the last layer of the import trees" | ||
669 | ], | ||
670 | "default": "full", | ||
671 | "description": "The strategy to use when inserting new imports or merging imports." | ||
672 | }, | 543 | }, |
673 | "rust-analyzer.assist.importPrefix": { | 544 | "rust-analyzer.hoverActions.debug": { |
674 | "type": "string", | 545 | "markdownDescription": "Whether to show `Debug` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", |
675 | "enum": [ | 546 | "default": true, |
676 | "plain", | 547 | "type": "boolean" |
677 | "by_self", | 548 | }, |
678 | "by_crate" | 549 | "rust-analyzer.hoverActions.enable": { |
550 | "markdownDescription": "Whether to show HoverActions in Rust files.", | ||
551 | "default": true, | ||
552 | "type": "boolean" | ||
553 | }, | ||
554 | "rust-analyzer.hoverActions.gotoTypeDef": { | ||
555 | "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | ||
556 | "default": true, | ||
557 | "type": "boolean" | ||
558 | }, | ||
559 | "rust-analyzer.hoverActions.implementations": { | ||
560 | "markdownDescription": "Whether to show `Implementations` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | ||
561 | "default": true, | ||
562 | "type": "boolean" | ||
563 | }, | ||
564 | "rust-analyzer.hoverActions.run": { | ||
565 | "markdownDescription": "Whether to show `Run` action. Only applies when `#rust-analyzer.hoverActions.enable#` is set.", | ||
566 | "default": true, | ||
567 | "type": "boolean" | ||
568 | }, | ||
569 | "rust-analyzer.hoverActions.linksInHover": { | ||
570 | "markdownDescription": "Use markdown syntax for links in hover.", | ||
571 | "default": true, | ||
572 | "type": "boolean" | ||
573 | }, | ||
574 | "rust-analyzer.inlayHints.chainingHints": { | ||
575 | "markdownDescription": "Whether to show inlay type hints for method chains.", | ||
576 | "default": true, | ||
577 | "type": "boolean" | ||
578 | }, | ||
579 | "rust-analyzer.inlayHints.maxLength": { | ||
580 | "markdownDescription": "Maximum length for inlay hints.", | ||
581 | "default": null, | ||
582 | "type": [ | ||
583 | "null", | ||
584 | "integer" | ||
679 | ], | 585 | ], |
680 | "enumDescriptions": [ | 586 | "minimum": 0 |
681 | "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.", | 587 | }, |
682 | "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name", | 588 | "rust-analyzer.inlayHints.parameterHints": { |
683 | "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to." | 589 | "markdownDescription": "Whether to show function parameter name inlay hints at the call site.", |
590 | "default": true, | ||
591 | "type": "boolean" | ||
592 | }, | ||
593 | "rust-analyzer.inlayHints.typeHints": { | ||
594 | "markdownDescription": "Whether to show inlay type hints for variables.", | ||
595 | "default": true, | ||
596 | "type": "boolean" | ||
597 | }, | ||
598 | "rust-analyzer.lens.debug": { | ||
599 | "markdownDescription": "Whether to show `Debug` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | ||
600 | "default": true, | ||
601 | "type": "boolean" | ||
602 | }, | ||
603 | "rust-analyzer.lens.enable": { | ||
604 | "markdownDescription": "Whether to show CodeLens in Rust files.", | ||
605 | "default": true, | ||
606 | "type": "boolean" | ||
607 | }, | ||
608 | "rust-analyzer.lens.implementations": { | ||
609 | "markdownDescription": "Whether to show `Implementations` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | ||
610 | "default": true, | ||
611 | "type": "boolean" | ||
612 | }, | ||
613 | "rust-analyzer.lens.run": { | ||
614 | "markdownDescription": "Whether to show `Run` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | ||
615 | "default": true, | ||
616 | "type": "boolean" | ||
617 | }, | ||
618 | "rust-analyzer.lens.methodReferences": { | ||
619 | "markdownDescription": "Whether to show `Method References` lens. Only applies when `#rust-analyzer.lens.enable#` is set.", | ||
620 | "default": false, | ||
621 | "type": "boolean" | ||
622 | }, | ||
623 | "rust-analyzer.linkedProjects": { | ||
624 | "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set of projects. \\nElements must be paths pointing to Cargo.toml, rust-project.json, or JSON objects in rust-project.json format.", | ||
625 | "default": [], | ||
626 | "type": "array", | ||
627 | "items": { | ||
628 | "type": [ | ||
629 | "string", | ||
630 | "object" | ||
631 | ] | ||
632 | } | ||
633 | }, | ||
634 | "rust-analyzer.lruCapacity": { | ||
635 | "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory.", | ||
636 | "default": null, | ||
637 | "type": [ | ||
638 | "null", | ||
639 | "integer" | ||
684 | ], | 640 | ], |
685 | "default": "plain", | 641 | "minimum": 0 |
686 | "description": "The path structure for newly inserted paths to use." | 642 | }, |
643 | "rust-analyzer.notifications.cargoTomlNotFound": { | ||
644 | "markdownDescription": "Whether to show `can't find Cargo.toml` error message.", | ||
645 | "default": true, | ||
646 | "type": "boolean" | ||
647 | }, | ||
648 | "rust-analyzer.procMacro.enable": { | ||
649 | "markdownDescription": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", | ||
650 | "default": false, | ||
651 | "type": "boolean" | ||
687 | }, | 652 | }, |
688 | "rust-analyzer.runnables.overrideCargo": { | 653 | "rust-analyzer.runnables.overrideCargo": { |
654 | "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", | ||
655 | "default": null, | ||
689 | "type": [ | 656 | "type": [ |
690 | "null", | 657 | "null", |
691 | "string" | 658 | "string" |
692 | ], | 659 | ] |
693 | "default": null, | ||
694 | "description": "Command to be executed instead of 'cargo' for runnables." | ||
695 | }, | 660 | }, |
696 | "rust-analyzer.runnables.cargoExtraArgs": { | 661 | "rust-analyzer.runnables.cargoExtraArgs": { |
662 | "markdownDescription": "Additional arguments to be passed to cargo for runnables such as tests or binaries.\\nFor example, it may be '--release'.", | ||
663 | "default": [], | ||
697 | "type": "array", | 664 | "type": "array", |
698 | "items": { | 665 | "items": { |
699 | "type": "string" | 666 | "type": "string" |
700 | }, | 667 | } |
701 | "default": [], | ||
702 | "description": "Additional arguments to be passed to cargo for runnables such as tests or binaries.\nFor example, it may be '--release'" | ||
703 | }, | 668 | }, |
704 | "rust-analyzer.rustcSource": { | 669 | "rust-analyzer.rustcSource": { |
670 | "markdownDescription": "Path to the rust compiler sources, for usage in rustc_private projects.", | ||
671 | "default": null, | ||
705 | "type": [ | 672 | "type": [ |
706 | "null", | 673 | "null", |
707 | "string" | 674 | "string" |
708 | ], | 675 | ] |
676 | }, | ||
677 | "rust-analyzer.rustfmt.extraArgs": { | ||
678 | "markdownDescription": "Additional arguments to rustfmt.", | ||
679 | "default": [], | ||
680 | "type": "array", | ||
681 | "items": { | ||
682 | "type": "string" | ||
683 | } | ||
684 | }, | ||
685 | "rust-analyzer.rustfmt.overrideCommand": { | ||
686 | "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for formatting.", | ||
709 | "default": null, | 687 | "default": null, |
710 | "description": "Path to the rust compiler sources, for usage in rustc_private projects." | 688 | "type": [ |
689 | "null", | ||
690 | "array" | ||
691 | ], | ||
692 | "items": { | ||
693 | "type": "string" | ||
694 | } | ||
711 | } | 695 | } |
712 | } | 696 | } |
713 | }, | 697 | }, |
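
For reference, a minimal Rust sketch of how the two new `rust-analyzer.assist.*` settings defined above could be consumed. All type, field, and function names below are hypothetical (this is not rust-analyzer's actual config code), and the sketch assumes `serde` with the `derive` feature plus `serde_json` as dependencies; only the keys, their allowed values, and the `full`/`plain` defaults are taken from the schema in this diff.

use serde::Deserialize;

// The three merge strategies listed under `importMergeBehaviour`.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
enum ImportMergeBehaviour {
    None, // no merging
    Full, // merge all layers of the import trees
    Last, // only merge the last layer of the import trees
}

// The three path styles listed under `importPrefix`.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
enum ImportPrefix {
    Plain,
    BySelf,
    ByCrate,
}

// Hypothetical container for the `rust-analyzer.assist.*` section.
#[derive(Debug, Deserialize)]
struct AssistSettings {
    #[serde(rename = "importMergeBehaviour", default = "default_merge")]
    import_merge_behaviour: ImportMergeBehaviour,
    #[serde(rename = "importPrefix", default = "default_prefix")]
    import_prefix: ImportPrefix,
}

fn default_merge() -> ImportMergeBehaviour {
    ImportMergeBehaviour::Full // schema default: "full"
}

fn default_prefix() -> ImportPrefix {
    ImportPrefix::Plain // schema default: "plain"
}

fn main() {
    // Explicit values, as a user might set them in settings.json.
    let explicit: AssistSettings =
        serde_json::from_str(r#"{ "importMergeBehaviour": "last", "importPrefix": "by_crate" }"#)
            .unwrap();
    assert_eq!(explicit.import_merge_behaviour, ImportMergeBehaviour::Last);
    assert_eq!(explicit.import_prefix, ImportPrefix::ByCrate);

    // Omitted keys fall back to the defaults declared in the schema.
    let defaults: AssistSettings = serde_json::from_str("{}").unwrap();
    assert_eq!(defaults.import_merge_behaviour, ImportMergeBehaviour::Full);
    assert_eq!(defaults.import_prefix, ImportPrefix::Plain);
}

The `snake_case` rename is what maps the JSON strings "by_self" and "by_crate" onto the enum variants, mirroring the `enum` lists in the schema.
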
diff --git a/xtask/src/install.rs b/xtask/src/install.rs index 78a8af797..12962bcfa 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs | |||
@@ -16,6 +16,7 @@ pub struct InstallCmd { | |||
16 | #[derive(Clone, Copy)] | 16 | #[derive(Clone, Copy)] |
17 | pub enum ClientOpt { | 17 | pub enum ClientOpt { |
18 | VsCode, | 18 | VsCode, |
19 | VsCodeExploration, | ||
19 | VsCodeInsiders, | 20 | VsCodeInsiders, |
20 | VsCodium, | 21 | VsCodium, |
21 | VsCodeOss, | 22 | VsCodeOss, |
@@ -26,10 +27,11 @@ impl ClientOpt { | |||
26 | pub const fn as_cmds(&self) -> &'static [&'static str] { | 27 | pub const fn as_cmds(&self) -> &'static [&'static str] { |
27 | match self { | 28 | match self { |
28 | ClientOpt::VsCode => &["code"], | 29 | ClientOpt::VsCode => &["code"], |
30 | ClientOpt::VsCodeExploration => &["code-exploration"], | ||
29 | ClientOpt::VsCodeInsiders => &["code-insiders"], | 31 | ClientOpt::VsCodeInsiders => &["code-insiders"], |
30 | ClientOpt::VsCodium => &["codium"], | 32 | ClientOpt::VsCodium => &["codium"], |
31 | ClientOpt::VsCodeOss => &["code-oss"], | 33 | ClientOpt::VsCodeOss => &["code-oss"], |
32 | ClientOpt::Any => &["code", "code-insiders", "codium", "code-oss"], | 34 | ClientOpt::Any => &["code", "code-exploration", "code-insiders", "codium", "code-oss"], |
33 | } | 35 | } |
34 | } | 36 | } |
35 | } | 37 | } |
@@ -44,11 +46,17 @@ impl std::str::FromStr for ClientOpt { | |||
44 | type Err = anyhow::Error; | 46 | type Err = anyhow::Error; |
45 | 47 | ||
46 | fn from_str(s: &str) -> Result<Self, Self::Err> { | 48 | fn from_str(s: &str) -> Result<Self, Self::Err> { |
47 | [ClientOpt::VsCode, ClientOpt::VsCodeInsiders, ClientOpt::VsCodium, ClientOpt::VsCodeOss] | 49 | [ |
48 | .iter() | 50 | ClientOpt::VsCode, |
49 | .copied() | 51 | ClientOpt::VsCodeExploration, |
50 | .find(|c| [s] == c.as_cmds()) | 52 | ClientOpt::VsCodeInsiders, |
51 | .ok_or_else(|| anyhow::format_err!("no such client")) | 53 | ClientOpt::VsCodium, |
54 | ClientOpt::VsCodeOss, | ||
55 | ] | ||
56 | .iter() | ||
57 | .copied() | ||
58 | .find(|c| [s] == c.as_cmds()) | ||
59 | .ok_or_else(|| anyhow::format_err!("no such client")) | ||
52 | } | 60 | } |
53 | } | 61 | } |
54 | 62 | ||
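
To show what the widened lookup above buys, here is a self-contained Rust sketch that mirrors the `as_cmds`/`from_str` logic from this hunk. It is an illustration only: error handling is collapsed to `Option` instead of the `anyhow::Result` the real `FromStr` impl returns, and the free function `parse_client` stands in for that impl.

// Reduced stand-in for xtask's ClientOpt; `Any` is kept to show why the
// lookup below iterates only over the single-command variants.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum ClientOpt {
    VsCode,
    VsCodeExploration,
    VsCodeInsiders,
    VsCodium,
    VsCodeOss,
    Any,
}

impl ClientOpt {
    const fn as_cmds(&self) -> &'static [&'static str] {
        match self {
            ClientOpt::VsCode => &["code"],
            ClientOpt::VsCodeExploration => &["code-exploration"],
            ClientOpt::VsCodeInsiders => &["code-insiders"],
            ClientOpt::VsCodium => &["codium"],
            ClientOpt::VsCodeOss => &["code-oss"],
            ClientOpt::Any => &["code", "code-exploration", "code-insiders", "codium", "code-oss"],
        }
    }
}

// Hypothetical helper mirroring the `FromStr` lookup: comparing the
// one-element array `[s]` with `as_cmds()` means only exact, single-command
// matches succeed, so `Any` can never be selected by name.
fn parse_client(s: &str) -> Option<ClientOpt> {
    [
        ClientOpt::VsCode,
        ClientOpt::VsCodeExploration,
        ClientOpt::VsCodeInsiders,
        ClientOpt::VsCodium,
        ClientOpt::VsCodeOss,
    ]
    .iter()
    .copied()
    .find(|c| [s] == c.as_cmds())
}

fn main() {
    // The new Exploration build resolves to its own variant...
    assert_eq!(parse_client("code-exploration"), Some(ClientOpt::VsCodeExploration));
    // ...while unknown names still yield no match ("no such client" in xtask).
    assert_eq!(parse_client("not-a-client"), None);
}

In practice this means the installer can now be pointed at the Exploration build by its binary name, matching the updated `--client` help text in `xtask/src/main.rs` below.
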
diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 536a67047..3e07daae9 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs | |||
@@ -47,7 +47,7 @@ USAGE: | |||
47 | 47 | ||
48 | FLAGS: | 48 | FLAGS: |
49 | --client[=CLIENT] Install only VS Code plugin. | 49 | --client[=CLIENT] Install only VS Code plugin. |
50 | CLIENT is one of 'code', 'code-insiders', 'codium', or 'code-oss' | 50 | CLIENT is one of 'code', 'code-exploration', 'code-insiders', 'codium', or 'code-oss' |
51 | --server Install only the language server | 51 | --server Install only the language server |
52 | --mimalloc Use mimalloc for server | 52 | --mimalloc Use mimalloc for server |
53 | -h, --help Prints help information | 53 | -h, --help Prints help information |
diff --git a/xtask/src/release.rs b/xtask/src/release.rs index 3cf0d849f..2d716253e 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs | |||
@@ -54,9 +54,10 @@ https://github.com/sponsors/rust-analyzer[GitHub Sponsors]. | |||
54 | 54 | ||
55 | for &adoc in [ | 55 | for &adoc in [ |
56 | "manual.adoc", | 56 | "manual.adoc", |
57 | "generated_features.adoc", | ||
58 | "generated_assists.adoc", | 57 | "generated_assists.adoc", |
58 | "generated_config.adoc", | ||
59 | "generated_diagnostic.adoc", | 59 | "generated_diagnostic.adoc", |
60 | "generated_features.adoc", | ||
60 | ] | 61 | ] |
61 | .iter() | 62 | .iter() |
62 | { | 63 | { |