Diffstat (limited to 'crates')
-rw-r--r--   crates/hir/src/lib.rs                             2
-rw-r--r--   crates/hir/src/semantics.rs                      76
-rw-r--r--   crates/hir_def/src/body.rs                        4
-rw-r--r--   crates/hir_def/src/body/tests.rs                 28
-rw-r--r--   crates/hir_def/src/test_db.rs                     9
-rw-r--r--   crates/hir_expand/src/builtin_macro.rs           42
-rw-r--r--   crates/hir_expand/src/lib.rs                     70
-rw-r--r--   crates/ide/src/diagnostics/fixes.rs               5
-rw-r--r--   crates/ide/src/display/navigation_target.rs      23
-rw-r--r--   crates/proc_macro_srv/src/lib.rs                  3
-rw-r--r--   crates/rust-analyzer/Cargo.toml                   2
-rw-r--r--   crates/rust-analyzer/src/bin/args.rs              5
-rw-r--r--   crates/rust-analyzer/src/bin/main.rs              3
-rw-r--r--   crates/rust-analyzer/src/cli/analysis_stats.rs    4
-rw-r--r--   crates/rust-analyzer/src/config.rs              469
-rw-r--r--   crates/test_utils/src/lib.rs                     26
16 files changed, 553 insertions, 218 deletions
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index c7c7377d7..302a52491 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -39,7 +39,7 @@ pub use crate::{
         Struct, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
     },
     has_source::HasSource,
-    semantics::{original_range, PathResolution, Semantics, SemanticsScope},
+    semantics::{PathResolution, Semantics, SemanticsScope},
 };
 
 pub use hir_def::{
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index c61a430e1..4315ad48b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -13,10 +13,7 @@ use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{
-    algo::{find_node_at_offset, skip_trivia_token},
-    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
-};
+use syntax::{algo::find_node_at_offset, ast, AstNode, SyntaxNode, SyntaxToken, TextSize};
 
 use crate::{
     code_model::Access,
@@ -25,7 +22,7 @@ use crate::{
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, SourceAnalyzer},
     AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
-    Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
+    Module, ModuleDef, Name, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -372,7 +369,7 @@ impl<'db> SemanticsImpl<'db> {
 
     fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node.clone());
-        original_range(self.db, node.as_ref())
+        node.as_ref().original_file_range(self.db.upcast())
     }
 
     fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
@@ -380,7 +377,7 @@ impl<'db> SemanticsImpl<'db> {
         let root = self.db.parse_or_expand(src.file_id).unwrap();
         let node = src.value.to_node(&root);
         self.cache(root, src.file_id);
-        original_range(self.db, src.with_value(&node))
+        src.with_value(&node).original_file_range(self.db.upcast())
     }
 
     fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
@@ -771,68 +768,3 @@ impl<'a> SemanticsScope<'a> {
         resolve_hir_path(self.db, &self.resolver, &path)
     }
 }
-
-// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
-pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
-    if let Some(range) = original_range_opt(db, node) {
-        let original_file = range.file_id.original_file(db.upcast());
-        if range.file_id == original_file.into() {
-            return FileRange { file_id: original_file, range: range.value };
-        }
-
-        log::error!("Fail to mapping up more for {:?}", range);
-        return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value };
-    }
-
-    // Fall back to whole macro call
-    if let Some(expansion) = node.file_id.expansion_info(db.upcast()) {
-        if let Some(call_node) = expansion.call_node() {
-            return FileRange {
-                file_id: call_node.file_id.original_file(db.upcast()),
-                range: call_node.value.text_range(),
-            };
-        }
-    }
-
-    FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() }
-}
-
-fn original_range_opt(
-    db: &dyn HirDatabase,
-    node: InFile<&SyntaxNode>,
-) -> Option<InFile<TextRange>> {
-    let expansion = node.file_id.expansion_info(db.upcast())?;
-
-    // the input node has only one token ?
-    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
-        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
-
-    Some(node.value.descendants().find_map(|it| {
-        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
-        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
-
-        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
-        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
-
-        if (!single && first == last) || (first.file_id != last.file_id) {
-            return None;
-        }
-
-        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
-    })?)
-}
-
-fn ascend_call_token(
-    db: &dyn HirDatabase,
-    expansion: &ExpansionInfo,
-    token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
-    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
-    if origin != Origin::Call {
-        return None;
-    }
-    if let Some(info) = mapped.file_id.expansion_info(db.upcast()) {
-        return ascend_call_token(db, &info, mapped);
-    }
-    Some(mapped)
-}
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs
index 92bcc1705..c5d6f5fb0 100644
--- a/crates/hir_def/src/body.rs
+++ b/crates/hir_def/src/body.rs
@@ -103,8 +103,7 @@ impl Expander {
         local_scope: Option<&ItemScope>,
         macro_call: ast::MacroCall,
     ) -> ExpandResult<Option<(Mark, T)>> {
-        self.recursion_limit += 1;
-        if self.recursion_limit > EXPANSION_RECURSION_LIMIT {
+        if self.recursion_limit + 1 > EXPANSION_RECURSION_LIMIT {
             mark::hit!(your_stack_belongs_to_me);
             return ExpandResult::str_err("reached recursion limit during macro expansion".into());
         }
@@ -165,6 +164,7 @@ impl Expander {
 
         log::debug!("macro expansion {:#?}", node.syntax());
 
+        self.recursion_limit += 1;
         let mark = Mark {
             file_id: self.current_file_id,
             ast_id_map: mem::take(&mut self.ast_id_map),
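
The two hunks above fix an off-by-one in the expansion budget: the limit is now checked against `recursion_limit + 1`, and the counter is only bumped once the macro call has actually produced an expansion, so early bail-outs no longer eat into the budget. A minimal standalone sketch of that pattern (the types and names are illustrative, not rust-analyzer's):

    const RECURSION_LIMIT: usize = 4;

    struct Expander {
        depth: usize,
    }

    impl Expander {
        /// Hypothetical expansion step: `Err` models a macro call that could
        /// not be expanded at all.
        fn expand(&mut self, input: &str) -> Result<String, String> {
            if self.depth + 1 > RECURSION_LIMIT {
                return Err("reached recursion limit during macro expansion".to_string());
            }
            if input.is_empty() {
                // Bail out early *without* consuming budget.
                return Err("nothing to expand".to_string());
            }
            // Only a successful expansion counts against the limit.
            self.depth += 1;
            Ok(format!("expanded({})", input))
        }
    }

    fn main() {
        let mut exp = Expander { depth: 0 };
        for _ in 0..10 {
            assert!(exp.expand("").is_err());
        }
        // Ten failed attempts did not move the counter.
        assert_eq!(exp.depth, 0);
        assert_eq!(exp.expand("m!()").unwrap(), "expanded(m!())");
    }
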
diff --git a/crates/hir_def/src/body/tests.rs b/crates/hir_def/src/body/tests.rs
index 6dba9817d..de77d5fc9 100644
--- a/crates/hir_def/src/body/tests.rs
+++ b/crates/hir_def/src/body/tests.rs
@@ -134,3 +134,31 @@ fn f() {
134 "#, 134 "#,
135 ); 135 );
136} 136}
137
138#[test]
139fn dollar_crate_in_builtin_macro() {
140 check_diagnostics(
141 r#"
142#[macro_export]
143#[rustc_builtin_macro]
144macro_rules! format_args {}
145
146#[macro_export]
147macro_rules! arg {
148 () => {}
149}
150
151#[macro_export]
152macro_rules! outer {
153 () => {
154 $crate::format_args!( "", $crate::arg!(1) )
155 };
156}
157
158fn f() {
159 outer!();
160 //^^^^^^^^ leftover tokens
161}
162 "#,
163 )
164}
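
The new test feeds `$crate`-qualified paths into a builtin macro's arguments. For comparison, this is the ordinary `$crate` behaviour the expander has to preserve, shown as a tiny self-contained program compiled by plain rustc (no rust-analyzer involved):

    // `$crate` expands to the defining crate's root path, so `outer!` keeps
    // working no matter where it is invoked from.
    #[macro_export]
    macro_rules! arg {
        () => {
            1
        };
    }

    #[macro_export]
    macro_rules! outer {
        () => {
            $crate::arg!() + 1
        };
    }

    fn main() {
        assert_eq!(outer!(), 2);
    }
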
diff --git a/crates/hir_def/src/test_db.rs b/crates/hir_def/src/test_db.rs
index 00fe711fe..f8b150850 100644
--- a/crates/hir_def/src/test_db.rs
+++ b/crates/hir_def/src/test_db.rs
@@ -157,11 +157,12 @@ impl TestDB {
         db.diagnostics(|d| {
             let src = d.display_source();
             let root = db.parse_or_expand(src.file_id).unwrap();
-            // FIXME: macros...
-            let file_id = src.file_id.original_file(db);
-            let range = src.value.to_node(&root).text_range();
+
+            let node = src.map(|ptr| ptr.to_node(&root));
+            let frange = node.as_ref().original_file_range(db);
+
             let message = d.message().to_owned();
-            actual.entry(file_id).or_default().push((range, message));
+            actual.entry(frange.file_id).or_default().push((frange.range, message));
         });
 
         for (file_id, diags) in actual.iter_mut() {
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 44a5556b6..79b970850 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -287,23 +287,34 @@ fn concat_expand(
     _arg_id: EagerMacroId,
     tt: &tt::Subtree,
 ) -> ExpandResult<Option<(tt::Subtree, FragmentKind)>> {
+    let mut err = None;
     let mut text = String::new();
     for (i, t) in tt.token_trees.iter().enumerate() {
         match t {
             tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
-                text += &match unquote_str(&it) {
-                    Some(s) => s,
-                    None => {
-                        return ExpandResult::only_err(mbe::ExpandError::ConversionError);
-                    }
-                };
+                // concat works with string and char literals, so remove any quotes.
+                // It also works with integer, float and boolean literals, so just use the rest
+                // as-is.
+
+                text += it
+                    .text
+                    .trim_start_matches(|c| match c {
+                        'r' | '#' | '\'' | '"' => true,
+                        _ => false,
+                    })
+                    .trim_end_matches(|c| match c {
+                        '#' | '\'' | '"' => true,
+                        _ => false,
+                    });
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
-            _ => return ExpandResult::only_err(mbe::ExpandError::UnexpectedToken),
+            _ => {
+                err.get_or_insert(mbe::ExpandError::UnexpectedToken);
+            }
         }
     }
 
-    ExpandResult::ok(Some((quote!(#text), FragmentKind::Expr)))
+    ExpandResult { value: Some((quote!(#text), FragmentKind::Expr)), err }
 }
308 319
309fn relative_file( 320fn relative_file(
@@ -686,4 +697,19 @@ mod tests {
 
         assert_eq!(expanded, r#"b"""#);
     }
+
+    #[test]
+    fn test_concat_expand() {
+        let expanded = expand_builtin_macro(
+            r##"
+            #[rustc_builtin_macro]
+            macro_rules! concat {}
+            concat!("foo", 0, r#"bar"#);
+            "##,
+        );
+
+        assert_eq!(expanded, r#""foo0bar""#);
+
+        // FIXME: `true`/`false` literals don't work.
+    }
 }
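
The rewritten `concat_expand` no longer returns on the first literal it cannot unquote: it trims quote and raw-string characters from each literal's text and records at most one `UnexpectedToken` error on the side. A standalone sketch of just the trimming step, including one sharp edge that follows from the character-based trim (an illustration, not the actual expander):

    /// Strip string/char/raw-string quoting from a literal's text; integer
    /// and float literals pass through unchanged.
    fn trim_literal(text: &str) -> &str {
        text.trim_start_matches(|c| matches!(c, 'r' | '#' | '\'' | '"'))
            .trim_end_matches(|c| matches!(c, '#' | '\'' | '"'))
    }

    fn main() {
        assert_eq!(trim_literal(r#""foo""#), "foo");
        assert_eq!(trim_literal(r###"r#"bar"#"###), "bar");
        assert_eq!(trim_literal("0"), "0");
        assert_eq!(trim_literal("'x'"), "x");
        // Sharp edge of the character-based trim: a leading 'r' inside an
        // ordinary string is indistinguishable from a raw-string prefix.
        assert_eq!(trim_literal(r#""rust""#), "ust");
    }
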
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index 2633fd8f7..1a9428514 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -20,11 +20,11 @@ pub use mbe::{ExpandError, ExpandResult};
 use std::hash::Hash;
 use std::sync::Arc;
 
-use base_db::{impl_intern_key, salsa, CrateId, FileId};
+use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange};
 use syntax::{
-    algo,
+    algo::{self, skip_trivia_token},
     ast::{self, AstNode},
-    SyntaxNode, SyntaxToken, TextSize,
+    Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -445,6 +445,70 @@ impl InFile<SyntaxNode> {
     }
 }
 
+impl<'a> InFile<&'a SyntaxNode> {
+    pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+        if let Some(range) = original_range_opt(db, self) {
+            let original_file = range.file_id.original_file(db);
+            if range.file_id == original_file.into() {
+                return FileRange { file_id: original_file, range: range.value };
+            }
+
+            log::error!("Fail to mapping up more for {:?}", range);
+            return FileRange { file_id: range.file_id.original_file(db), range: range.value };
+        }
+
+        // Fall back to whole macro call.
+        let mut node = self.cloned();
+        while let Some(call_node) = node.file_id.call_node(db) {
+            node = call_node;
+        }
+
+        let orig_file = node.file_id.original_file(db);
+        assert_eq!(node.file_id, orig_file.into());
+        FileRange { file_id: orig_file, range: node.value.text_range() }
+    }
+}
+
+fn original_range_opt(
+    db: &dyn db::AstDatabase,
+    node: InFile<&SyntaxNode>,
+) -> Option<InFile<TextRange>> {
+    let expansion = node.file_id.expansion_info(db)?;
+
+    // the input node has only one token ?
+    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
+        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
+
+    Some(node.value.descendants().find_map(|it| {
+        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
+        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
+
+        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
+        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
+
+        if (!single && first == last) || (first.file_id != last.file_id) {
+            return None;
+        }
+
+        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
+    })?)
+}
+
+fn ascend_call_token(
+    db: &dyn db::AstDatabase,
+    expansion: &ExpansionInfo,
+    token: InFile<SyntaxToken>,
+) -> Option<InFile<SyntaxToken>> {
+    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
+    if origin != Origin::Call {
+        return None;
+    }
+    if let Some(info) = mapped.file_id.expansion_info(db) {
+        return ascend_call_token(db, &info, mapped);
+    }
+    Some(mapped)
+}
+
 impl InFile<SyntaxToken> {
     pub fn ancestors_with_macros(
         self,
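
`original_file_range` (moved here from `hir::semantics`) first tries the precise token-by-token upmapping and otherwise walks `call_node` links until it reaches a real file, rather than falling back after a single step. A minimal standalone model of that walk-up-and-fall-back shape, with made-up `FileId`/`Expansion` types standing in for `HirFileId` and `ExpansionInfo`:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileId(u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileRange {
        file_id: FileId,
        start: u32,
        end: u32,
    }

    /// Stand-in for a macro-expansion file: it remembers the call site that
    /// produced it, which may itself live inside another expansion.
    struct Expansion {
        call_site: FileRange,
        parent: Option<Box<Expansion>>,
    }

    /// If a precise upmapping is available use it; otherwise fall back to the
    /// range of the outermost macro call, walking up as many levels as needed.
    fn original_file_range(
        precise: Option<FileRange>,
        mut expansion: Option<&Expansion>,
        in_expansion: FileRange,
    ) -> FileRange {
        if let Some(range) = precise {
            return range;
        }
        let mut range = in_expansion;
        while let Some(exp) = expansion {
            range = exp.call_site;
            expansion = exp.parent.as_deref();
        }
        range
    }

    fn main() {
        let outer = Expansion {
            call_site: FileRange { file_id: FileId(0), start: 10, end: 30 },
            parent: None,
        };
        let nested = Expansion {
            call_site: FileRange { file_id: FileId(1), start: 0, end: 5 },
            parent: Some(Box::new(outer)),
        };
        let fake = FileRange { file_id: FileId(2), start: 1, end: 2 };
        // No precise mapping: we end up at the outermost call in file 0.
        assert_eq!(original_file_range(None, Some(&nested), fake).file_id, FileId(0));
    }
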
diff --git a/crates/ide/src/diagnostics/fixes.rs b/crates/ide/src/diagnostics/fixes.rs
index d275dd75b..24d08651e 100644
--- a/crates/ide/src/diagnostics/fixes.rs
+++ b/crates/ide/src/diagnostics/fixes.rs
@@ -68,7 +68,8 @@ impl DiagnosticWithFix for MissingFields {
         }
 
         let root = sema.db.parse_or_expand(self.file)?;
-        let old_field_list = self.field_list_parent.to_node(&root).record_expr_field_list()?;
+        let field_list_parent = self.field_list_parent.to_node(&root);
+        let old_field_list = field_list_parent.record_expr_field_list()?;
         let mut new_field_list = old_field_list.clone();
         for f in self.missed_fields.iter() {
             let field =
@@ -85,7 +86,7 @@ impl DiagnosticWithFix for MissingFields {
         Some(Fix::new(
             "Fill struct fields",
             SourceFileEdit { file_id: self.file.original_file(sema.db), edit }.into(),
-            sema.original_range(&old_field_list.syntax()).range,
+            sema.original_range(&field_list_parent.syntax()).range,
         ))
     }
 }
diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs
index 0c429a262..4790d648a 100644
--- a/crates/ide/src/display/navigation_target.rs
+++ b/crates/ide/src/display/navigation_target.rs
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
 use either::Either;
-use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource};
+use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource};
 use ide_db::base_db::{FileId, SourceDatabase};
 use ide_db::{defs::Definition, RootDatabase};
 use syntax::{
@@ -62,7 +62,8 @@ impl NavigationTarget {
     pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
         let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
         if let Some(src) = module.declaration_source(db) {
-            let frange = original_range(db, src.as_ref().map(|it| it.syntax()));
+            let node = src.as_ref().map(|it| it.syntax());
+            let frange = node.original_file_range(db);
             let mut res = NavigationTarget::from_syntax(
                 frange.file_id,
                 name,
@@ -104,8 +105,8 @@ impl NavigationTarget {
         let name =
             node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
         let focus_range =
-            node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range);
-        let frange = original_range(db, node.map(|it| it.syntax()));
+            node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
+        let frange = node.map(|it| it.syntax()).original_file_range(db);
 
         NavigationTarget::from_syntax(
             frange.file_id,
@@ -124,7 +125,7 @@ impl NavigationTarget {
     ) -> NavigationTarget {
         let name =
             named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
-        let frange = original_range(db, node.map(|it| it.syntax()));
+        let frange = node.map(|it| it.syntax()).original_file_range(db);
 
         NavigationTarget::from_syntax(
             frange.file_id,
@@ -236,7 +237,7 @@ impl ToNav for hir::Module {
                 (node.syntax(), node.name().map(|it| it.syntax().text_range()))
             }
         };
-        let frange = original_range(db, src.with_value(syntax));
+        let frange = src.with_value(syntax).original_file_range(db);
         NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, syntax.kind())
     }
 }
@@ -246,14 +247,14 @@ impl ToNav for hir::ImplDef {
         let src = self.source(db);
         let derive_attr = self.is_builtin_derive(db);
         let frange = if let Some(item) = &derive_attr {
-            original_range(db, item.syntax())
+            item.syntax().original_file_range(db)
         } else {
-            original_range(db, src.as_ref().map(|it| it.syntax()))
+            src.as_ref().map(|it| it.syntax()).original_file_range(db)
         };
         let focus_range = if derive_attr.is_some() {
             None
         } else {
-            src.value.self_ty().map(|ty| original_range(db, src.with_value(ty.syntax())).range)
+            src.value.self_ty().map(|ty| src.with_value(ty.syntax()).original_file_range(db).range)
         };
 
         NavigationTarget::from_syntax(
@@ -278,7 +279,7 @@ impl ToNav for hir::Field {
                 res
             }
             FieldSource::Pos(it) => {
-                let frange = original_range(db, src.with_value(it.syntax()));
+                let frange = src.with_value(it.syntax()).original_file_range(db);
                 NavigationTarget::from_syntax(
                     frange.file_id,
                     "".into(),
@@ -331,7 +332,7 @@ impl ToNav for hir::Local {
             }
             Either::Right(it) => it.syntax().clone(),
         };
-        let full_range = original_range(db, src.with_value(&node));
+        let full_range = src.with_value(&node).original_file_range(db);
         let name = match self.name(db) {
             Some(it) => it.to_string().into(),
             None => "".into(),
diff --git a/crates/proc_macro_srv/src/lib.rs b/crates/proc_macro_srv/src/lib.rs
index 6e890f8e2..9cca96994 100644
--- a/crates/proc_macro_srv/src/lib.rs
+++ b/crates/proc_macro_srv/src/lib.rs
@@ -40,7 +40,8 @@ impl ProcMacroSrv {
         match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) {
             Ok(expansion) => Ok(ExpansionResult { expansion }),
             Err(msg) => {
-                Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg))
+                let msg = msg.as_str().unwrap_or("<unknown error>");
+                Err(format!("proc-macro panicked: {}", msg))
             }
         }
     }
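
The friendlier message above comes from turning the expander's panic payload into a plain string, with a placeholder when the payload is not a string. The same idea shown standalone with `std::panic` (the real code goes through the proc-macro bridge's message type, so treat this as an analogy):

    use std::any::Any;
    use std::panic;

    /// Turn a panic payload into something printable, falling back to a fixed
    /// message when the payload is neither a `&str` nor a `String`.
    fn panic_message(payload: &(dyn Any + Send)) -> &str {
        payload
            .downcast_ref::<&str>()
            .copied()
            .or_else(|| payload.downcast_ref::<String>().map(|s| s.as_str()))
            .unwrap_or("<unknown error>")
    }

    fn main() {
        // The default panic hook still prints to stderr; we only inspect the payload.
        let result = panic::catch_unwind(|| {
            panic!("expansion failed for macro `{}`", "my_macro");
        });
        let err = result.unwrap_err();
        println!("proc-macro panicked: {}", panic_message(err.as_ref()));
    }
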
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 0a055b039..039976e4b 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -27,7 +27,7 @@ pico-args = "0.3.1"
 oorandom = "11.1.2"
 rustc-hash = "1.1.0"
 serde = { version = "1.0.106", features = ["derive"] }
-serde_json = "1.0.48"
+serde_json = { version = "1.0.48", features = ["preserve_order"] }
 threadpool = "1.7.1"
 rayon = "1.5"
 mimalloc = { version = "0.1.19", default-features = false, optional = true }
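
Enabling `preserve_order` makes `serde_json::Map` keep insertion order instead of sorting keys, which matters because the JSON schema generated in `config.rs` below is compared against `editors/code/package.json` and rendered into the manual, so field order has to be stable and meaningful. A small illustration, assuming a crate that enables the feature exactly as above:

    use serde_json::{json, Map, Value};

    fn main() {
        let mut map = Map::new();
        map.insert("zebra".to_string(), json!(1));
        map.insert("apple".to_string(), json!(2));

        // With `preserve_order`, iteration follows insertion order; without
        // it, the keys would come back alphabetically ("apple" before "zebra").
        let keys: Vec<&String> = map.keys().collect();
        println!("{:?}", keys); // ["zebra", "apple"]

        let schema: Value = map.into();
        println!("{:#}", schema);
    }
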
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs
index 8ddf1e031..0a471154e 100644
--- a/crates/rust-analyzer/src/bin/args.rs
+++ b/crates/rust-analyzer/src/bin/args.rs
@@ -28,6 +28,7 @@ pub(crate) enum Command {
     StructuredSearch { debug_snippet: Option<String>, patterns: Vec<SsrPattern> },
     ProcMacro,
     RunServer,
+    PrintConfigSchema,
     Version,
     Help,
 }
@@ -135,6 +136,10 @@ impl Args {
             return Ok(Args { verbosity, log_file: None, command: Command::Help });
         }
 
+        if matches.contains("--print-config-schema") {
+            return Ok(Args { verbosity, log_file, command: Command::PrintConfigSchema });
+        }
+
         let subcommand = match matches.subcommand()? {
             Some(it) => it,
             None => {
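
The flag is recognised before subcommand parsing, the same way `--help` is handled a few lines earlier. A minimal sketch of that `pico-args` shape (the printed placeholder output is invented; the `contains`/`subcommand` calls mirror the ones used above):

    fn main() -> Result<(), pico_args::Error> {
        let mut matches = pico_args::Arguments::from_env();

        if matches.contains("--print-config-schema") {
            // In rust-analyzer this prints `Config::json_schema()`.
            println!("{{ ...json schema... }}");
            return Ok(());
        }

        // Otherwise fall through to normal subcommand handling.
        let subcommand = matches.subcommand()?;
        println!("subcommand: {:?}", subcommand);
        Ok(())
    }
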
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 21fba8302..defdcbd74 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -31,6 +31,9 @@ fn try_main() -> Result<()> {
     setup_logging(args.log_file)?;
     match args.command {
         args::Command::RunServer => run_server()?,
+        args::Command::PrintConfigSchema => {
+            println!("{:#}", Config::json_schema());
+        }
         args::Command::ProcMacro => proc_macro_srv::cli::run()?,
 
         args::Command::Parse { no_dump } => cli::parse(no_dump)?,
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 98ef0cd68..58d284d47 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -8,7 +8,7 @@ use std::{
 
 use hir::{
     db::{AstDatabase, DefDatabase, HirDatabase},
-    original_range, AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
+    AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
 };
 use hir_def::FunctionId;
 use hir_ty::{Ty, TypeWalk};
@@ -232,7 +232,7 @@ impl AnalysisStatsCmd {
                     // But also, we should just turn the type mismatches into diagnostics and provide these
                     let root = db.parse_or_expand(src.file_id).unwrap();
                     let node = src.map(|e| e.to_node(&root).syntax().clone());
-                    let original_range = original_range(db, node.as_ref());
+                    let original_range = node.as_ref().original_file_range(db);
                     let path = vfs.file_path(original_range.file_id);
                     let line_index =
                         host.analysis().file_line_index(original_range.file_id).unwrap();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 5243b50c8..bd41a971b 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -13,14 +13,166 @@ use flycheck::FlycheckConfig;
13use hir::PrefixKind; 13use hir::PrefixKind;
14use ide::{AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig}; 14use ide::{AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig};
15use ide_db::helpers::insert_use::MergeBehaviour; 15use ide_db::helpers::insert_use::MergeBehaviour;
16use itertools::Itertools;
16use lsp_types::{ClientCapabilities, MarkupKind}; 17use lsp_types::{ClientCapabilities, MarkupKind};
17use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest}; 18use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest};
18use rustc_hash::FxHashSet; 19use rustc_hash::FxHashSet;
19use serde::Deserialize; 20use serde::{de::DeserializeOwned, Deserialize};
20use vfs::AbsPathBuf; 21use vfs::AbsPathBuf;
21 22
22use crate::{caps::enabled_completions_resolve_capabilities, diagnostics::DiagnosticsMapConfig}; 23use crate::{caps::enabled_completions_resolve_capabilities, diagnostics::DiagnosticsMapConfig};
23 24
25config_data! {
26 struct ConfigData {
27 /// The strategy to use when inserting new imports or merging imports.
28 assist_importMergeBehaviour: MergeBehaviourDef = "\"full\"",
29 /// The path structure for newly inserted paths to use.
30 assist_importPrefix: ImportPrefixDef = "\"plain\"",
31
32 /// Show function name and docs in parameter hints.
33 callInfo_full: bool = "true",
34
35 /// Automatically refresh project info via `cargo metadata` on
36 /// Cargo.toml changes.
37 cargo_autoreload: bool = "true",
38 /// Activate all available features.
39 cargo_allFeatures: bool = "false",
40 /// List of features to activate.
41 cargo_features: Vec<String> = "[]",
42 /// Run `cargo check` on startup to get the correct value for package
43 /// OUT_DIRs.
44 cargo_loadOutDirsFromCheck: bool = "false",
45 /// Do not activate the `default` feature.
46 cargo_noDefaultFeatures: bool = "false",
47 /// Compilation target (target triple).
48 cargo_target: Option<String> = "null",
49 /// Internal config for debugging, disables loading of sysroot crates.
50 cargo_noSysroot: bool = "false",
51
52 /// Run specified `cargo check` command for diagnostics on save.
53 checkOnSave_enable: bool = "true",
54 /// Check with all features (will be passed as `--all-features`).
55 /// Defaults to `rust-analyzer.cargo.allFeatures`.
56 checkOnSave_allFeatures: Option<bool> = "null",
57 /// Check all targets and tests (will be passed as `--all-targets`).
58 checkOnSave_allTargets: bool = "true",
59 /// Cargo command to use for `cargo check`.
60 checkOnSave_command: String = "\"check\"",
61 /// Do not activate the `default` feature.
62 checkOnSave_noDefaultFeatures: Option<bool> = "null",
63 /// Check for a specific target. Defaults to
64 /// `rust-analyzer.cargo.target`.
65 checkOnSave_target: Option<String> = "null",
66 /// Extra arguments for `cargo check`.
67 checkOnSave_extraArgs: Vec<String> = "[]",
68 /// List of features to activate. Defaults to
69 /// `rust-analyzer.cargo.features`.
70 checkOnSave_features: Option<Vec<String>> = "null",
71 /// Advanced option, fully override the command rust-analyzer uses for
72 /// checking. The command should include `--message-format=json` or
73 /// similar option.
74 checkOnSave_overrideCommand: Option<Vec<String>> = "null",
75
76 /// Whether to add argument snippets when completing functions.
77 completion_addCallArgumentSnippets: bool = "true",
78 /// Whether to add parenthesis when completing functions.
79 completion_addCallParenthesis: bool = "true",
80 /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
81 completion_postfix_enable: bool = "true",
82 /// Toggles the additional completions that automatically add imports when completed.
83 /// Note that your client have to specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
84 completion_autoimport_enable: bool = "true",
85
86 /// Whether to show native rust-analyzer diagnostics.
87 diagnostics_enable: bool = "true",
88 /// Whether to show experimental rust-analyzer diagnostics that might
89 /// have more false positives than usual.
90 diagnostics_enableExperimental: bool = "true",
91 /// List of rust-analyzer diagnostics to disable.
92 diagnostics_disabled: FxHashSet<String> = "[]",
93 /// List of warnings that should be displayed with info severity.\nThe
94 /// warnings will be indicated by a blue squiggly underline in code and
95 /// a blue icon in the problems panel.
96 diagnostics_warningsAsHint: Vec<String> = "[]",
97 /// List of warnings that should be displayed with hint severity.\nThe
98 /// warnings will be indicated by faded text or three dots in code and
99 /// will not show up in the problems panel.
100 diagnostics_warningsAsInfo: Vec<String> = "[]",
101
102 /// Controls file watching implementation.
103 files_watcher: String = "\"client\"",
104
105 /// Whether to show `Debug` action. Only applies when
106 /// `#rust-analyzer.hoverActions.enable#` is set.
107 hoverActions_debug: bool = "true",
108 /// Whether to show HoverActions in Rust files.
109 hoverActions_enable: bool = "true",
110 /// Whether to show `Go to Type Definition` action. Only applies when
111 /// `#rust-analyzer.hoverActions.enable#` is set.
112 hoverActions_gotoTypeDef: bool = "true",
113 /// Whether to show `Implementations` action. Only applies when
114 /// `#rust-analyzer.hoverActions.enable#` is set.
115 hoverActions_implementations: bool = "true",
116 /// Whether to show `Run` action. Only applies when
117 /// `#rust-analyzer.hoverActions.enable#` is set.
118 hoverActions_run: bool = "true",
119 /// Use markdown syntax for links in hover.
120 hoverActions_linksInHover: bool = "true",
121
122 /// Whether to show inlay type hints for method chains.
123 inlayHints_chainingHints: bool = "true",
124 /// Maximum length for inlay hints.
125 inlayHints_maxLength: Option<usize> = "null",
126 /// Whether to show function parameter name inlay hints at the call
127 /// site.
128 inlayHints_parameterHints: bool = "true",
129 /// Whether to show inlay type hints for variables.
130 inlayHints_typeHints: bool = "true",
131
132 /// Whether to show `Debug` lens. Only applies when
133 /// `#rust-analyzer.lens.enable#` is set.
134 lens_debug: bool = "true",
135 /// Whether to show CodeLens in Rust files.
136 lens_enable: bool = "true",
137 /// Whether to show `Implementations` lens. Only applies when
138 /// `#rust-analyzer.lens.enable#` is set.
139 lens_implementations: bool = "true",
140 /// Whether to show `Run` lens. Only applies when
141 /// `#rust-analyzer.lens.enable#` is set.
142 lens_run: bool = "true",
143 /// Whether to show `Method References` lens. Only applies when
144 /// `#rust-analyzer.lens.enable#` is set.
145 lens_methodReferences: bool = "false",
146
147 /// Disable project auto-discovery in favor of explicitly specified set
148 /// of projects. \nElements must be paths pointing to Cargo.toml,
149 /// rust-project.json, or JSON objects in rust-project.json format.
150 linkedProjects: Vec<ManifestOrProjectJson> = "[]",
151 /// Number of syntax trees rust-analyzer keeps in memory.
152 lruCapacity: Option<usize> = "null",
153 /// Whether to show `can't find Cargo.toml` error message.
154 notifications_cargoTomlNotFound: bool = "true",
155 /// Enable Proc macro support, cargo.loadOutDirsFromCheck must be
156 /// enabled.
157 procMacro_enable: bool = "false",
158
159 /// Command to be executed instead of 'cargo' for runnables.
160 runnables_overrideCargo: Option<String> = "null",
161 /// Additional arguments to be passed to cargo for runnables such as
162 /// tests or binaries.\nFor example, it may be '--release'.
163 runnables_cargoExtraArgs: Vec<String> = "[]",
164
165 /// Path to the rust compiler sources, for usage in rustc_private projects.
166 rustcSource : Option<String> = "null",
167
168 /// Additional arguments to rustfmt.
169 rustfmt_extraArgs: Vec<String> = "[]",
170 /// Advanced option, fully override the command rust-analyzer uses for
171 /// formatting.
172 rustfmt_overrideCommand: Option<Vec<String>> = "null",
173 }
174}
175
24#[derive(Debug, Clone)] 176#[derive(Debug, Clone)]
25pub struct Config { 177pub struct Config {
26 pub client_caps: ClientCapsConfig, 178 pub client_caps: ClientCapsConfig,
@@ -149,25 +301,27 @@ pub struct ClientCapsConfig {
 
 impl Config {
     pub fn new(root_path: AbsPathBuf) -> Self {
-        Config {
+        // Defaults here don't matter, we'll immediately re-write them with
+        // ConfigData.
+        let mut res = Config {
             client_caps: ClientCapsConfig::default(),
 
-            publish_diagnostics: true,
+            publish_diagnostics: false,
             diagnostics: DiagnosticsConfig::default(),
             diagnostics_map: DiagnosticsMapConfig::default(),
             lru_capacity: None,
             proc_macro_srv: None,
             files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() },
-            notifications: NotificationsConfig { cargo_toml_not_found: true },
+            notifications: NotificationsConfig { cargo_toml_not_found: false },
 
-            cargo_autoreload: true,
+            cargo_autoreload: false,
             cargo: CargoConfig::default(),
             rustfmt: RustfmtConfig::Rustfmt { extra_args: Vec::new() },
             flycheck: Some(FlycheckConfig::CargoCommand {
-                command: "check".to_string(),
+                command: String::new(),
                 target_triple: None,
                 no_default_features: false,
-                all_targets: true,
+                all_targets: false,
                 all_features: false,
                 extra_args: Vec::new(),
                 features: Vec::new(),
@@ -175,35 +329,32 @@ impl Config {
             runnables: RunnablesConfig::default(),
 
             inlay_hints: InlayHintsConfig {
-                type_hints: true,
-                parameter_hints: true,
-                chaining_hints: true,
+                type_hints: false,
+                parameter_hints: false,
+                chaining_hints: false,
                 max_length: None,
             },
-            completion: CompletionConfig {
-                enable_postfix_completions: true,
-                enable_autoimport_completions: true,
-                add_call_parenthesis: true,
-                add_call_argument_snippets: true,
-                ..CompletionConfig::default()
-            },
+            completion: CompletionConfig::default(),
             assist: AssistConfig::default(),
-            call_info_full: true,
+            call_info_full: false,
             lens: LensConfig::default(),
             hover: HoverConfig::default(),
             semantic_tokens_refresh: false,
             linked_projects: Vec::new(),
             root_path,
-        }
+        };
+        res.do_update(serde_json::json!({}));
+        res
     }
-
     pub fn update(&mut self, json: serde_json::Value) {
         log::info!("Config::update({:#})", json);
-
         if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
             return;
         }
-
+        self.do_update(json);
+        log::info!("Config::update() = {:#?}", self);
+    }
+    fn do_update(&mut self, json: serde_json::Value) {
         let data = ConfigData::from_json(json);
 
         self.publish_diagnostics = data.diagnostics_enable;
@@ -349,8 +500,6 @@ impl Config {
             links_in_hover: data.hoverActions_linksInHover,
             markdown: true,
         };
-
-        log::info!("Config::update() = {:#?}", self);
     }
 
     pub fn update_caps(&mut self, caps: &ClientCapabilities) {
@@ -434,6 +583,10 @@ impl Config {
             }
         }
     }
+
+    pub fn json_schema() -> serde_json::Value {
+        ConfigData::json_schema()
+    }
 }
 
 #[derive(Deserialize)]
@@ -459,94 +612,206 @@ enum ImportPrefixDef {
459 ByCrate, 612 ByCrate,
460} 613}
461 614
462macro_rules! config_data { 615macro_rules! _config_data {
463 (struct $name:ident { $($field:ident: $ty:ty = $default:expr,)*}) => { 616 (struct $name:ident {
617 $(
618 $(#[doc=$doc:literal])*
619 $field:ident: $ty:ty = $default:expr,
620 )*
621 }) => {
464 #[allow(non_snake_case)] 622 #[allow(non_snake_case)]
465 struct $name { $($field: $ty,)* } 623 struct $name { $($field: $ty,)* }
466 impl $name { 624 impl $name {
467 fn from_json(mut json: serde_json::Value) -> $name { 625 fn from_json(mut json: serde_json::Value) -> $name {
468 $name {$( 626 $name {$(
469 $field: { 627 $field: get_field(&mut json, stringify!($field), $default),
470 let pointer = stringify!($field).replace('_', "/");
471 let pointer = format!("/{}", pointer);
472 json.pointer_mut(&pointer)
473 .and_then(|it| serde_json::from_value(it.take()).ok())
474 .unwrap_or($default)
475 },
476 )*} 628 )*}
477 } 629 }
478 }
479 630
631 fn json_schema() -> serde_json::Value {
632 schema(&[
633 $({
634 let field = stringify!($field);
635 let ty = stringify!($ty);
636 (field, ty, &[$($doc),*], $default)
637 },)*
638 ])
639 }
640
641 #[cfg(test)]
642 fn manual() -> String {
643 manual(&[
644 $({
645 let field = stringify!($field);
646 let ty = stringify!($ty);
647 (field, ty, &[$($doc),*], $default)
648 },)*
649 ])
650 }
651 }
480 }; 652 };
481} 653}
654use _config_data as config_data;
655
656fn get_field<T: DeserializeOwned>(
657 json: &mut serde_json::Value,
658 field: &'static str,
659 default: &str,
660) -> T {
661 let default = serde_json::from_str(default).unwrap();
662
663 let mut pointer = field.replace('_', "/");
664 pointer.insert(0, '/');
665 json.pointer_mut(&pointer)
666 .and_then(|it| serde_json::from_value(it.take()).ok())
667 .unwrap_or(default)
668}
482 669
483config_data! { 670fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
484 struct ConfigData { 671 for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
485 assist_importMergeBehaviour: MergeBehaviourDef = MergeBehaviourDef::Full, 672 fn key(f: &str) -> &str {
486 assist_importPrefix: ImportPrefixDef = ImportPrefixDef::Plain, 673 f.splitn(2, "_").next().unwrap()
487 674 };
488 callInfo_full: bool = true, 675 assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
489 676 }
490 cargo_autoreload: bool = true, 677
491 cargo_allFeatures: bool = false, 678 let map = fields
492 cargo_features: Vec<String> = Vec::new(), 679 .iter()
493 cargo_loadOutDirsFromCheck: bool = false, 680 .map(|(field, ty, doc, default)| {
494 cargo_noDefaultFeatures: bool = false, 681 let name = field.replace("_", ".");
495 cargo_target: Option<String> = None, 682 let name = format!("rust-analyzer.{}", name);
496 cargo_noSysroot: bool = false, 683 let props = field_props(field, ty, doc, default);
497 684 (name, props)
498 checkOnSave_enable: bool = true, 685 })
499 checkOnSave_allFeatures: Option<bool> = None, 686 .collect::<serde_json::Map<_, _>>();
500 checkOnSave_allTargets: bool = true, 687 map.into()
501 checkOnSave_command: String = "check".into(), 688}
502 checkOnSave_noDefaultFeatures: Option<bool> = None, 689
503 checkOnSave_target: Option<String> = None, 690fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
504 checkOnSave_extraArgs: Vec<String> = Vec::new(), 691 let doc = doc.iter().map(|it| it.trim()).join(" ");
505 checkOnSave_features: Option<Vec<String>> = None, 692 assert!(
506 checkOnSave_overrideCommand: Option<Vec<String>> = None, 693 doc.ends_with('.') && doc.starts_with(char::is_uppercase),
507 694 "bad docs for {}: {:?}",
508 completion_addCallArgumentSnippets: bool = true, 695 field,
509 completion_addCallParenthesis: bool = true, 696 doc
510 completion_postfix_enable: bool = true, 697 );
511 completion_autoimport_enable: bool = true, 698 let default = default.parse::<serde_json::Value>().unwrap();
512 699
513 diagnostics_enable: bool = true, 700 let mut map = serde_json::Map::default();
514 diagnostics_enableExperimental: bool = true, 701 macro_rules! set {
515 diagnostics_disabled: FxHashSet<String> = FxHashSet::default(), 702 ($($key:literal: $value:tt),*$(,)?) => {{$(
516 diagnostics_warningsAsHint: Vec<String> = Vec::new(), 703 map.insert($key.into(), serde_json::json!($value));
517 diagnostics_warningsAsInfo: Vec<String> = Vec::new(), 704 )*}};
518 705 }
519 files_watcher: String = "client".into(), 706 set!("markdownDescription": doc);
520 707 set!("default": default);
521 hoverActions_debug: bool = true, 708
522 hoverActions_enable: bool = true, 709 match ty {
523 hoverActions_gotoTypeDef: bool = true, 710 "bool" => set!("type": "boolean"),
524 hoverActions_implementations: bool = true, 711 "String" => set!("type": "string"),
525 hoverActions_run: bool = true, 712 "Vec<String>" => set! {
526 hoverActions_linksInHover: bool = true, 713 "type": "array",
527 714 "items": { "type": "string" },
528 inlayHints_chainingHints: bool = true, 715 },
529 inlayHints_maxLength: Option<usize> = None, 716 "FxHashSet<String>" => set! {
530 inlayHints_parameterHints: bool = true, 717 "type": "array",
531 inlayHints_typeHints: bool = true, 718 "items": { "type": "string" },
532 719 "uniqueItems": true,
533 lens_debug: bool = true, 720 },
534 lens_enable: bool = true, 721 "Option<usize>" => set! {
535 lens_implementations: bool = true, 722 "type": ["null", "integer"],
536 lens_run: bool = true, 723 "minimum": 0,
537 lens_methodReferences: bool = false, 724 },
538 725 "Option<String>" => set! {
539 linkedProjects: Vec<ManifestOrProjectJson> = Vec::new(), 726 "type": ["null", "string"],
540 lruCapacity: Option<usize> = None, 727 },
541 notifications_cargoTomlNotFound: bool = true, 728 "Option<bool>" => set! {
542 procMacro_enable: bool = false, 729 "type": ["null", "boolean"],
543 730 },
544 runnables_overrideCargo: Option<String> = None, 731 "Option<Vec<String>>" => set! {
545 runnables_cargoExtraArgs: Vec<String> = Vec::new(), 732 "type": ["null", "array"],
546 733 "items": { "type": "string" },
547 rustfmt_extraArgs: Vec<String> = Vec::new(), 734 },
548 rustfmt_overrideCommand: Option<Vec<String>> = None, 735 "MergeBehaviourDef" => set! {
549 736 "type": "string",
550 rustcSource : Option<String> = None, 737 "enum": ["none", "full", "last"],
738 "enumDescriptions": [
739 "No merging",
740 "Merge all layers of the import trees",
741 "Only merge the last layer of the import trees"
742 ],
743 },
744 "ImportPrefixDef" => set! {
745 "type": "string",
746 "enum": [
747 "plain",
748 "by_self",
749 "by_crate"
750 ],
751 "enumDescriptions": [
752 "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
753 "Prefix all import paths with `self` if they don't begin with `self`, `super`, `crate` or a crate name",
754 "Force import paths to be absolute by always starting them with `crate` or the crate name they refer to."
755 ],
756 },
757 "Vec<ManifestOrProjectJson>" => set! {
758 "type": "array",
759 "items": { "type": ["string", "object"] },
760 },
761 _ => panic!("{}: {}", ty, default),
762 }
763
764 map.into()
765}
766
767#[cfg(test)]
768fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
769 fields
770 .iter()
771 .map(|(field, _ty, doc, default)| {
772 let name = field.replace("_", ".");
773 let name = format!("rust-analyzer.{} (default: `{}`)", name, default);
774 format!("{}::\n{}\n", name, doc.join(" "))
775 })
776 .collect::<String>()
777}
778
779#[cfg(test)]
780mod tests {
781 use std::fs;
782
783 use test_utils::project_dir;
784
785 use super::*;
786
787 #[test]
788 fn schema_in_sync_with_package_json() {
789 let s = Config::json_schema();
790 let schema = format!("{:#}", s);
791 let schema = schema.trim_start_matches('{').trim_end_matches('}');
792
793 let package_json = project_dir().join("editors/code/package.json");
794 let package_json = fs::read_to_string(&package_json).unwrap();
795
796 let p = remove_ws(&package_json);
797 let s = remove_ws(&schema);
798
799 assert!(p.contains(&s), "update config in package.json. New config:\n{:#}", schema);
800 }
801
802 #[test]
803 fn schema_in_sync_with_docs() {
804 let docs_path = project_dir().join("docs/user/generated_config.adoc");
805 let current = fs::read_to_string(&docs_path).unwrap();
806 let expected = ConfigData::manual();
807
808 if remove_ws(&current) != remove_ws(&expected) {
809 fs::write(&docs_path, expected).unwrap();
810 panic!("updated config manual");
811 }
812 }
813
814 fn remove_ws(text: &str) -> String {
815 text.replace(char::is_whitespace, "")
551 } 816 }
552} 817}
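
The `config_data!` macro introduced above is the backbone of this change: every setting is declared once, with doc comments and a JSON-literal default, and the macro derives `from_json`, the `json_schema` used for `--print-config-schema`/`package.json`, and the generated manual from that single list. A compact standalone sketch of the same shape (`DemoConfig`, the `fields()` helper and the two sample settings are invented for illustration; it only needs `serde` and `serde_json`):

    use serde_json::Value;

    /// Same idea as the macro above, trimmed down: one declaration per
    /// setting, from which the struct, `from_json` and a (name, default, docs)
    /// listing are all generated.
    macro_rules! config_data {
        (struct $name:ident {
            $(
                $(#[doc=$doc:literal])*
                $field:ident: $ty:ty = $default:expr,
            )*
        }) => {
            #[allow(non_snake_case)]
            #[derive(Debug)]
            struct $name { $($field: $ty,)* }

            impl $name {
                fn from_json(mut json: Value) -> $name {
                    $name {
                        $($field: get_field(&mut json, stringify!($field), $default),)*
                    }
                }

                fn fields() -> Vec<(&'static str, &'static str, Vec<&'static str>)> {
                    vec![$((stringify!($field), $default, vec![$($doc),*]),)*]
                }
            }
        };
    }

    fn get_field<T: serde::de::DeserializeOwned>(
        json: &mut Value,
        field: &'static str,
        default: &str,
    ) -> T {
        let default = serde_json::from_str(default).unwrap();
        // `callInfo_full` becomes the JSON pointer `/callInfo/full`.
        let pointer = format!("/{}", field.replace('_', "/"));
        json.pointer_mut(&pointer)
            .and_then(|it| serde_json::from_value(it.take()).ok())
            .unwrap_or(default)
    }

    config_data! {
        struct DemoConfig {
            /// Show function name and docs in parameter hints.
            callInfo_full: bool = "true",
            /// Maximum length for inlay hints.
            inlayHints_maxLength: Option<usize> = "null",
        }
    }

    fn main() {
        let cfg = DemoConfig::from_json(serde_json::json!({ "callInfo": { "full": false } }));
        assert_eq!(cfg.callInfo_full, false);
        assert_eq!(cfg.inlayHints_maxLength, None);

        // The same per-field list would drive `json_schema()` and the manual.
        for (name, default, doc) in DemoConfig::fields() {
            println!("rust-analyzer.{} (default: `{}`):{}", name.replace('_', "."), default, doc.join(" "));
        }
    }
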
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index a49be4602..05940a546 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -321,12 +321,11 @@ fn lines_match_works() {
 /// as paths). You can use a `"{...}"` string literal as a wildcard for
 /// arbitrary nested JSON. Arrays are sorted before comparison.
 pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
-    use serde_json::Value::*;
     match (expected, actual) {
-        (&Number(ref l), &Number(ref r)) if l == r => None,
-        (&Bool(l), &Bool(r)) if l == r => None,
-        (&String(ref l), &String(ref r)) if lines_match(l, r) => None,
-        (&Array(ref l), &Array(ref r)) => {
+        (Value::Number(l), Value::Number(r)) if l == r => None,
+        (Value::Bool(l), Value::Bool(r)) if l == r => None,
+        (Value::String(l), Value::String(r)) if lines_match(l, r) => None,
+        (Value::Array(l), Value::Array(r)) => {
             if l.len() != r.len() {
                 return Some((expected, actual));
             }
@@ -350,17 +349,26 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
                 None
             }
         }
-        (&Object(ref l), &Object(ref r)) => {
+        (Value::Object(l), Value::Object(r)) => {
+            fn sorted_values(obj: &serde_json::Map<String, Value>) -> Vec<&Value> {
+                let mut entries = obj.iter().collect::<Vec<_>>();
+                entries.sort_by_key(|it| it.0);
+                entries.into_iter().map(|(_k, v)| v).collect::<Vec<_>>()
+            }
+
             let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
             if !same_keys {
                 return Some((expected, actual));
             }
 
-            l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).next()
+            let l = sorted_values(l);
+            let r = sorted_values(r);
+
+            l.into_iter().zip(r).filter_map(|(l, r)| find_mismatch(l, r)).next()
         }
-        (&Null, &Null) => None,
+        (Value::Null, Value::Null) => None,
         // magic string literal "{...}" acts as wildcard for any sub-JSON
-        (&String(ref l), _) if l == "{...}" => None,
+        (Value::String(l), _) if l == "{...}" => None,
         _ => Some((expected, actual)),
     }
 }
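
With `preserve_order` enabled, two logically equal JSON objects may iterate their keys in different orders, so the object branch above now zips values in key-sorted order before recursing. A trimmed-down standalone version of that comparison (it mirrors the structure above but leaves out the `"{...}"` wildcard and the order-insensitive array matching):

    use serde_json::{json, Map, Value};

    /// Return the first mismatching pair of values, comparing objects in
    /// key-sorted order so map iteration order does not matter.
    fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Value, &'a Value)> {
        match (expected, actual) {
            (Value::Object(l), Value::Object(r)) => {
                fn sorted_values(obj: &Map<String, Value>) -> Vec<&Value> {
                    let mut entries = obj.iter().collect::<Vec<_>>();
                    entries.sort_by_key(|it| it.0);
                    entries.into_iter().map(|(_k, v)| v).collect()
                }
                let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
                if !same_keys {
                    return Some((expected, actual));
                }
                let (l, r) = (sorted_values(l), sorted_values(r));
                l.into_iter().zip(r).find_map(|(l, r)| find_mismatch(l, r))
            }
            (Value::Array(l), Value::Array(r)) if l.len() == r.len() => {
                l.iter().zip(r).find_map(|(l, r)| find_mismatch(l, r))
            }
            _ if expected == actual => None,
            _ => Some((expected, actual)),
        }
    }

    fn main() {
        let expected = json!({ "b": [1, 2], "a": "x" });
        let actual = json!({ "a": "x", "b": [1, 2] });
        assert_eq!(find_mismatch(&expected, &actual), None);

        let actual = json!({ "a": "x", "b": [1, 3] });
        assert!(find_mismatch(&expected, &actual).is_some());
    }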