-rw-r--r--  Cargo.lock | 17
-rw-r--r--  crates/flycheck/src/lib.rs | 1
-rw-r--r--  crates/hir/src/lib.rs | 9
-rw-r--r--  crates/hir_def/src/body.rs | 4
-rw-r--r--  crates/hir_def/src/body/lower.rs | 10
-rw-r--r--  crates/hir_def/src/expr.rs | 4
-rw-r--r--  crates/hir_def/src/find_path.rs | 13
-rw-r--r--  crates/hir_def/src/nameres/collector.rs | 41
-rw-r--r--  crates/hir_def/src/nameres/tests.rs | 16
-rw-r--r--  crates/hir_def/src/type_ref.rs | 73
-rw-r--r--  crates/hir_expand/src/builtin_macro.rs | 35
-rw-r--r--  crates/hir_expand/src/name.rs | 1
-rw-r--r--  crates/hir_ty/src/consteval.rs | 56
-rw-r--r--  crates/hir_ty/src/consts.rs | 21
-rw-r--r--  crates/hir_ty/src/diagnostics/match_check.rs | 1
-rw-r--r--  crates/hir_ty/src/display.rs | 5
-rw-r--r--  crates/hir_ty/src/infer/expr.rs | 32
-rw-r--r--  crates/hir_ty/src/infer/pat.rs | 9
-rw-r--r--  crates/hir_ty/src/interner.rs | 3
-rw-r--r--  crates/hir_ty/src/lib.rs | 10
-rw-r--r--  crates/hir_ty/src/lower.rs | 11
-rw-r--r--  crates/hir_ty/src/tests/coercion.rs | 46
-rw-r--r--  crates/hir_ty/src/tests/patterns.rs | 10
-rw-r--r--  crates/hir_ty/src/tests/simple.rs | 65
-rw-r--r--  crates/hir_ty/src/tests/traits.rs | 97
-rw-r--r--  crates/ide/src/diagnostics.rs | 596
-rw-r--r--  crates/ide/src/diagnostics/field_shorthand.rs | 13
-rw-r--r--  crates/ide/src/diagnostics/fixes.rs | 296
-rw-r--r--  crates/ide/src/diagnostics/fixes/change_case.rs | 155
-rw-r--r--  crates/ide/src/diagnostics/fixes/create_field.rs | 156
-rw-r--r--  crates/ide/src/diagnostics/fixes/fill_missing_fields.rs | 217
-rw-r--r--  crates/ide/src/diagnostics/fixes/remove_semicolon.rs | 41
-rw-r--r--  crates/ide/src/diagnostics/fixes/replace_with_find_map.rs | 84
-rw-r--r--  crates/ide/src/diagnostics/fixes/unresolved_module.rs | 89
-rw-r--r--  crates/ide/src/diagnostics/fixes/wrap_tail_expr.rs | 211
-rw-r--r--  crates/ide/src/diagnostics/unlinked_file.rs | 54
-rw-r--r--  crates/ide/src/doc_links.rs | 8
-rw-r--r--  crates/ide/src/join_lines.rs | 2
-rw-r--r--  crates/ide/src/lib.rs | 2
-rw-r--r--  crates/ide/src/matching_brace.rs | 12
-rw-r--r--  crates/ide/src/parent_module.rs | 6
-rw-r--r--  crates/ide/src/references/rename.rs | 12
-rw-r--r--  crates/ide/src/syntax_highlighting/highlight.rs | 11
-rw-r--r--  crates/ide/src/syntax_highlighting/inject.rs | 2
-rw-r--r--  crates/ide/src/syntax_highlighting/tags.rs | 4
-rw-r--r--  crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html | 14
-rw-r--r--  crates/ide/src/syntax_highlighting/test_data/highlighting.html | 15
-rw-r--r--  crates/ide/src/syntax_highlighting/tests.rs | 27
-rw-r--r--  crates/ide_assists/src/assist_context.rs | 6
-rw-r--r--  crates/ide_assists/src/handlers/add_explicit_type.rs | 28
-rw-r--r--  crates/ide_assists/src/handlers/add_missing_impl_members.rs | 3
-rw-r--r--  crates/ide_assists/src/handlers/auto_import.rs | 30
-rw-r--r--  crates/ide_assists/src/handlers/expand_glob_import.rs | 2
-rw-r--r--  crates/ide_assists/src/handlers/extract_function.rs | 65
-rw-r--r--  crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs | 6
-rw-r--r--  crates/ide_assists/src/handlers/fill_match_arms.rs | 633
-rw-r--r--  crates/ide_assists/src/handlers/generate_default_from_new.rs | 287
-rw-r--r--  crates/ide_assists/src/handlers/generate_new.rs | 153
-rw-r--r--  crates/ide_assists/src/handlers/introduce_named_lifetime.rs | 12
-rw-r--r--  crates/ide_assists/src/handlers/merge_imports.rs | 8
-rw-r--r--  crates/ide_assists/src/handlers/move_bounds.rs | 4
-rw-r--r--  crates/ide_assists/src/handlers/move_module_to_file.rs | 38
-rw-r--r--  crates/ide_assists/src/handlers/pull_assignment_up.rs | 4
-rw-r--r--  crates/ide_assists/src/handlers/raw_string.rs | 2
-rw-r--r--  crates/ide_assists/src/handlers/reorder_fields.rs | 4
-rw-r--r--  crates/ide_assists/src/handlers/reorder_impl.rs | 3
-rw-r--r--  crates/ide_assists/src/handlers/replace_impl_trait_with_generic.rs | 4
-rw-r--r--  crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs | 2
-rw-r--r--  crates/ide_assists/src/handlers/replace_string_with_char.rs | 2
-rw-r--r--  crates/ide_assists/src/handlers/replace_unwrap_with_match.rs | 2
-rw-r--r--  crates/ide_assists/src/tests/generated.rs | 1
-rw-r--r--  crates/ide_assists/src/utils.rs | 48
-rw-r--r--  crates/ide_completion/src/completions/keyword.rs | 22
-rw-r--r--  crates/ide_db/src/helpers/merge_imports.rs | 38
-rw-r--r--  crates/ide_db/src/symbol_index.rs | 5
-rw-r--r--  crates/project_model/src/build_data.rs | 1
-rw-r--r--  crates/rust-analyzer/src/caps.rs | 1
-rw-r--r--  crates/rust-analyzer/src/config.rs | 73
-rw-r--r--  crates/rust-analyzer/src/handlers.rs | 61
-rw-r--r--  crates/rust-analyzer/src/integrated_benchmarks.rs | 4
-rw-r--r--  crates/rust-analyzer/src/lsp_ext.rs | 42
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs | 2
-rw-r--r--  crates/rust-analyzer/src/semantic_tokens.rs | 5
-rw-r--r--  crates/rust-analyzer/src/to_proto.rs | 11
-rw-r--r--  crates/rust-analyzer/tests/slow-tests/main.rs (renamed from crates/rust-analyzer/tests/rust-analyzer/main.rs) | 0
-rw-r--r--  crates/rust-analyzer/tests/slow-tests/support.rs (renamed from crates/rust-analyzer/tests/rust-analyzer/support.rs) | 0
-rw-r--r--  crates/rust-analyzer/tests/slow-tests/testdir.rs (renamed from crates/rust-analyzer/tests/rust-analyzer/testdir.rs) | 0
-rw-r--r--  crates/syntax/Cargo.toml | 2
-rw-r--r--  crates/syntax/src/algo.rs | 133
-rw-r--r--  crates/syntax/src/ast.rs | 6
-rw-r--r--  crates/syntax/src/ast/edit.rs | 493
-rw-r--r--  crates/syntax/src/ast/edit_in_place.rs | 182
-rw-r--r--  crates/syntax/src/ast/make.rs | 12
-rw-r--r--  crates/syntax/src/ast/token_ext.rs | 81
-rw-r--r--  crates/syntax/src/parsing.rs | 7
-rw-r--r--  crates/syntax/src/parsing/text_tree_sink.rs | 4
-rw-r--r--  crates/syntax/test_data/parser/ok/0045_block_attrs.rast | 6
-rw-r--r--  docs/dev/lsp-extensions.md | 36
-rw-r--r--  docs/dev/style.md | 38
-rw-r--r--  docs/user/generated_config.adoc | 19
-rw-r--r--  docs/user/manual.adoc | 1
-rw-r--r--  editors/code/package.json | 31
-rw-r--r--  xtask/src/tidy.rs | 5
103 files changed, 3115 insertions, 2170 deletions
diff --git a/Cargo.lock b/Cargo.lock
index f9c34547e..76a26ea4e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -437,18 +437,18 @@ dependencies = [
437 437
438[[package]] 438[[package]]
439name = "fsevent-sys" 439name = "fsevent-sys"
440version = "3.0.2" 440version = "3.1.0"
441source = "registry+https://github.com/rust-lang/crates.io-index" 441source = "registry+https://github.com/rust-lang/crates.io-index"
442checksum = "77a29c77f1ca394c3e73a9a5d24cfcabb734682d9634fc398f2204a63c994120" 442checksum = "ca6f5e6817058771c10f0eb0f05ddf1e35844266f972004fe8e4b21fda295bd5"
443dependencies = [ 443dependencies = [
444 "libc", 444 "libc",
445] 445]
446 446
447[[package]] 447[[package]]
448name = "fst" 448name = "fst"
449version = "0.4.5" 449version = "0.4.6"
450source = "registry+https://github.com/rust-lang/crates.io-index" 450source = "registry+https://github.com/rust-lang/crates.io-index"
451checksum = "d79238883cf0307100b90aba4a755d8051a3182305dfe7f649a1e9dc0517006f" 451checksum = "e398fae362f4124bbe630d99519fb2d68a03e2e3a23b441028cdcdc4f4895687"
452 452
453[[package]] 453[[package]]
454name = "gimli" 454name = "gimli"
@@ -939,11 +939,10 @@ dependencies = [
939 939
940[[package]] 940[[package]]
941name = "notify" 941name = "notify"
942version = "5.0.0-pre.7" 942version = "5.0.0-pre.8"
943source = "registry+https://github.com/rust-lang/crates.io-index" 943source = "registry+https://github.com/rust-lang/crates.io-index"
944checksum = "1ebe7699a0f8c5759450716ee03d231685c22b4fe8f406c42c22e0ad94d40ce7" 944checksum = "46bbbcd078f1f00ddb7a9abe70b96e91229b44b0b3afdec610f8e5137f8f014b"
945dependencies = [ 945dependencies = [
946 "anymap",
947 "bitflags", 946 "bitflags",
948 "crossbeam-channel", 947 "crossbeam-channel",
949 "filetime", 948 "filetime",
@@ -1295,9 +1294,9 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
1295 1294
1296[[package]] 1295[[package]]
1297name = "rowan" 1296name = "rowan"
1298version = "0.13.0-pre.5" 1297version = "0.13.0-pre.6"
1299source = "registry+https://github.com/rust-lang/crates.io-index" 1298source = "registry+https://github.com/rust-lang/crates.io-index"
1300checksum = "32a5fc82ed0b7e7fba157331f0d8f64abd73bced6e7ac2a4dfa0c4cf0ab584e8" 1299checksum = "82ccc04e145e9a5ab51b9c12a81d77c4a8250d87a407ab02ac650451141ff00d"
1301dependencies = [ 1300dependencies = [
1302 "countme", 1301 "countme",
1303 "hashbrown", 1302 "hashbrown",
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 1682d8bde..93cf6a3d6 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -215,6 +215,7 @@ impl FlycheckActor {
215 } => { 215 } => {
216 let mut cmd = Command::new(toolchain::cargo()); 216 let mut cmd = Command::new(toolchain::cargo());
217 cmd.arg(command); 217 cmd.arg(command);
218 cmd.current_dir(&self.workspace_root);
218 cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]) 219 cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
219 .arg(self.workspace_root.join("Cargo.toml")); 220 .arg(self.workspace_root.join("Cargo.toml"));
220 221
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index c9ef4b420..d443b124c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -52,7 +52,9 @@ use hir_def::{
52}; 52};
53use hir_expand::{diagnostics::DiagnosticSink, name::name, MacroDefKind}; 53use hir_expand::{diagnostics::DiagnosticSink, name::name, MacroDefKind};
54use hir_ty::{ 54use hir_ty::{
55 autoderef, could_unify, 55 autoderef,
56 consteval::ConstExt,
57 could_unify,
56 method_resolution::{self, def_crates, TyFingerprint}, 58 method_resolution::{self, def_crates, TyFingerprint},
57 primitive::UintTy, 59 primitive::UintTy,
58 subst_prefix, 60 subst_prefix,
@@ -873,6 +875,10 @@ impl Function {
873 db.function_data(self.id).is_unsafe() 875 db.function_data(self.id).is_unsafe()
874 } 876 }
875 877
878 pub fn is_async(self, db: &dyn HirDatabase) -> bool {
879 db.function_data(self.id).is_async()
880 }
881
876 pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) { 882 pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
877 let krate = self.module(db).id.krate(); 883 let krate = self.module(db).id.krate();
878 hir_def::diagnostics::validate_body(db.upcast(), self.id.into(), sink); 884 hir_def::diagnostics::validate_body(db.upcast(), self.id.into(), sink);
@@ -1910,6 +1916,7 @@ impl Type {
1910 substs.iter(&Interner).filter_map(|a| a.ty(&Interner)).any(go) 1916 substs.iter(&Interner).filter_map(|a| a.ty(&Interner)).any(go)
1911 } 1917 }
1912 1918
1919 TyKind::Array(_ty, len) if len.is_unknown() => true,
1913 TyKind::Array(ty, _) 1920 TyKind::Array(ty, _)
1914 | TyKind::Slice(ty) 1921 | TyKind::Slice(ty)
1915 | TyKind::Raw(_, ty) 1922 | TyKind::Raw(_, ty)
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs
index 8360426f1..98b485b60 100644
--- a/crates/hir_def/src/body.rs
+++ b/crates/hir_def/src/body.rs
@@ -21,8 +21,6 @@ use profile::Count;
21use rustc_hash::FxHashMap; 21use rustc_hash::FxHashMap;
22use syntax::{ast, AstNode, AstPtr}; 22use syntax::{ast, AstNode, AstPtr};
23 23
24pub use lower::LowerCtx;
25
26use crate::{ 24use crate::{
27 attr::{Attrs, RawAttrs}, 25 attr::{Attrs, RawAttrs},
28 db::DefDatabase, 26 db::DefDatabase,
@@ -35,6 +33,8 @@ use crate::{
35 UnresolvedMacro, 33 UnresolvedMacro,
36}; 34};
37 35
36pub use lower::LowerCtx;
37
38/// A subset of Expander that only deals with cfg attributes. We only need it to 38/// A subset of Expander that only deals with cfg attributes. We only need it to
39/// avoid cyclic queries in crate def map during enum processing. 39/// avoid cyclic queries in crate def map during enum processing.
40#[derive(Debug)] 40#[derive(Debug)]
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index 9f278d35b..2a7e0205f 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -1006,23 +1006,27 @@ impl From<ast::BinOp> for BinaryOp {
1006impl From<ast::LiteralKind> for Literal { 1006impl From<ast::LiteralKind> for Literal {
1007 fn from(ast_lit_kind: ast::LiteralKind) -> Self { 1007 fn from(ast_lit_kind: ast::LiteralKind) -> Self {
1008 match ast_lit_kind { 1008 match ast_lit_kind {
1009 // FIXME: these should have actual values filled in, but unsure on perf impact
1009 LiteralKind::IntNumber(lit) => { 1010 LiteralKind::IntNumber(lit) => {
1010 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) { 1011 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
1011 return Literal::Float(Default::default(), builtin); 1012 return Literal::Float(Default::default(), builtin);
1012 } else if let builtin @ Some(_) = 1013 } else if let builtin @ Some(_) =
1013 lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it)) 1014 lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
1014 { 1015 {
1015 Literal::Int(Default::default(), builtin) 1016 Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
1016 } else { 1017 } else {
1017 let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it)); 1018 let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
1018 Literal::Uint(Default::default(), builtin) 1019 Literal::Uint(lit.value().unwrap_or(0), builtin)
1019 } 1020 }
1020 } 1021 }
1021 LiteralKind::FloatNumber(lit) => { 1022 LiteralKind::FloatNumber(lit) => {
1022 let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it)); 1023 let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
1023 Literal::Float(Default::default(), ty) 1024 Literal::Float(Default::default(), ty)
1024 } 1025 }
1025 LiteralKind::ByteString(_) => Literal::ByteString(Default::default()), 1026 LiteralKind::ByteString(bs) => {
1027 let text = bs.value().map(Vec::from).unwrap_or_else(Default::default);
1028 Literal::ByteString(text)
1029 }
1026 LiteralKind::String(_) => Literal::String(Default::default()), 1030 LiteralKind::String(_) => Literal::String(Default::default()),
1027 LiteralKind::Byte => Literal::Uint(Default::default(), Some(BuiltinUint::U8)), 1031 LiteralKind::Byte => Literal::Uint(Default::default(), Some(BuiltinUint::U8)),
1028 LiteralKind::Bool(val) => Literal::Bool(val), 1032 LiteralKind::Bool(val) => Literal::Bool(val),
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs
index 0c3b41080..2ba619d23 100644
--- a/crates/hir_def/src/expr.rs
+++ b/crates/hir_def/src/expr.rs
@@ -43,8 +43,8 @@ pub enum Literal {
43 ByteString(Vec<u8>), 43 ByteString(Vec<u8>),
44 Char(char), 44 Char(char),
45 Bool(bool), 45 Bool(bool),
46 Int(u64, Option<BuiltinInt>), 46 Int(i128, Option<BuiltinInt>),
47 Uint(u64, Option<BuiltinUint>), 47 Uint(u128, Option<BuiltinUint>),
48 Float(u64, Option<BuiltinFloat>), // FIXME: f64 is not Eq 48 Float(u64, Option<BuiltinFloat>), // FIXME: f64 is not Eq
49} 49}
50 50
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index 858e88038..ee52794aa 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -5,10 +5,10 @@ use std::iter;
5use hir_expand::name::{known, AsName, Name}; 5use hir_expand::name::{known, AsName, Name};
6use rustc_hash::FxHashSet; 6use rustc_hash::FxHashSet;
7 7
8use crate::nameres::DefMap;
9use crate::{ 8use crate::{
10 db::DefDatabase, 9 db::DefDatabase,
11 item_scope::ItemInNs, 10 item_scope::ItemInNs,
11 nameres::DefMap,
12 path::{ModPath, PathKind}, 12 path::{ModPath, PathKind},
13 visibility::Visibility, 13 visibility::Visibility,
14 ModuleDefId, ModuleId, 14 ModuleDefId, ModuleId,
@@ -134,7 +134,16 @@ fn find_path_inner(
134 for (name, def_id) in root_def_map.extern_prelude() { 134 for (name, def_id) in root_def_map.extern_prelude() {
135 if item == ItemInNs::Types(*def_id) { 135 if item == ItemInNs::Types(*def_id) {
136 let name = scope_name.unwrap_or_else(|| name.clone()); 136 let name = scope_name.unwrap_or_else(|| name.clone());
137 return Some(ModPath::from_segments(PathKind::Plain, vec![name])); 137
138 let name_already_occupied_in_type_ns = def_map
139 .with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
140 def_map[local_id].scope.get(&name).take_types().filter(|&id| id != *def_id)
141 })
142 .is_some();
143 return Some(ModPath::from_segments(
144 if name_already_occupied_in_type_ns { PathKind::Abs } else { PathKind::Plain },
145 vec![name],
146 ));
138 } 147 }
139 } 148 }
140 149
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs
index 66d9396aa..221a5a556 100644
--- a/crates/hir_def/src/nameres/collector.rs
+++ b/crates/hir_def/src/nameres/collector.rs
@@ -539,6 +539,11 @@ impl DefCollector<'_> {
539 let res = self.def_map.resolve_name_in_extern_prelude(self.db, &extern_crate.name); 539 let res = self.def_map.resolve_name_in_extern_prelude(self.db, &extern_crate.name);
540 540
541 if let Some(ModuleDefId::ModuleId(m)) = res.take_types() { 541 if let Some(ModuleDefId::ModuleId(m)) = res.take_types() {
542 if m == self.def_map.module_id(current_module_id) {
543 cov_mark::hit!(ignore_macro_use_extern_crate_self);
544 return;
545 }
546
542 cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); 547 cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
543 self.import_all_macros_exported(current_module_id, m.krate); 548 self.import_all_macros_exported(current_module_id, m.krate);
544 } 549 }
@@ -864,6 +869,17 @@ impl DefCollector<'_> {
864 let mut resolved = Vec::new(); 869 let mut resolved = Vec::new();
865 let mut res = ReachedFixedPoint::Yes; 870 let mut res = ReachedFixedPoint::Yes;
866 macros.retain(|directive| { 871 macros.retain(|directive| {
872 let resolver = |path| {
873 let resolved_res = self.def_map.resolve_path_fp_with_macro(
874 self.db,
875 ResolveMode::Other,
876 directive.module_id,
877 &path,
878 BuiltinShadowMode::Module,
879 );
880 resolved_res.resolved_def.take_macros()
881 };
882
867 match &directive.kind { 883 match &directive.kind {
868 MacroDirectiveKind::FnLike { ast_id, fragment } => { 884 MacroDirectiveKind::FnLike { ast_id, fragment } => {
869 match macro_call_as_call_id( 885 match macro_call_as_call_id(
@@ -871,16 +887,7 @@ impl DefCollector<'_> {
871 *fragment, 887 *fragment,
872 self.db, 888 self.db,
873 self.def_map.krate, 889 self.def_map.krate,
874 |path| { 890 &resolver,
875 let resolved_res = self.def_map.resolve_path_fp_with_macro(
876 self.db,
877 ResolveMode::Other,
878 directive.module_id,
879 &path,
880 BuiltinShadowMode::Module,
881 );
882 resolved_res.resolved_def.take_macros()
883 },
884 &mut |_err| (), 891 &mut |_err| (),
885 ) { 892 ) {
886 Ok(Ok(call_id)) => { 893 Ok(Ok(call_id)) => {
@@ -897,7 +904,7 @@ impl DefCollector<'_> {
897 *derive_attr, 904 *derive_attr,
898 self.db, 905 self.db,
899 self.def_map.krate, 906 self.def_map.krate,
900 |path| self.resolve_derive_macro(directive.module_id, &path), 907 &resolver,
901 ) { 908 ) {
902 Ok(call_id) => { 909 Ok(call_id) => {
903 resolved.push((directive.module_id, call_id, directive.depth)); 910 resolved.push((directive.module_id, call_id, directive.depth));
@@ -923,18 +930,6 @@ impl DefCollector<'_> {
923 res 930 res
924 } 931 }
925 932
926 fn resolve_derive_macro(&self, module: LocalModuleId, path: &ModPath) -> Option<MacroDefId> {
927 let resolved_res = self.def_map.resolve_path_fp_with_macro(
928 self.db,
929 ResolveMode::Other,
930 module,
931 &path,
932 BuiltinShadowMode::Module,
933 );
934
935 resolved_res.resolved_def.take_macros()
936 }
937
938 fn collect_macro_expansion( 933 fn collect_macro_expansion(
939 &mut self, 934 &mut self,
940 module_id: LocalModuleId, 935 module_id: LocalModuleId,
diff --git a/crates/hir_def/src/nameres/tests.rs b/crates/hir_def/src/nameres/tests.rs
index 4f2e7a2f9..9f652731d 100644
--- a/crates/hir_def/src/nameres/tests.rs
+++ b/crates/hir_def/src/nameres/tests.rs
@@ -411,6 +411,22 @@ struct Arc;
411} 411}
412 412
413#[test] 413#[test]
414fn macro_use_extern_crate_self() {
415 cov_mark::check!(ignore_macro_use_extern_crate_self);
416 check(
417 r#"
418//- /main.rs crate:main
419#[macro_use]
420extern crate self as bla;
421"#,
422 expect![[r#"
423 crate
424 bla: t
425 "#]],
426 );
427}
428
429#[test]
414fn reexport_across_crates() { 430fn reexport_across_crates() {
415 check( 431 check(
416 r#" 432 r#"
diff --git a/crates/hir_def/src/type_ref.rs b/crates/hir_def/src/type_ref.rs
index ea29da5da..9e44547cb 100644
--- a/crates/hir_def/src/type_ref.rs
+++ b/crates/hir_def/src/type_ref.rs
@@ -2,6 +2,7 @@
2//! be directly created from an ast::TypeRef, without further queries. 2//! be directly created from an ast::TypeRef, without further queries.
3 3
4use hir_expand::{name::Name, AstId, InFile}; 4use hir_expand::{name::Name, AstId, InFile};
5use std::convert::TryInto;
5use syntax::ast; 6use syntax::ast;
6 7
7use crate::{body::LowerCtx, path::Path}; 8use crate::{body::LowerCtx, path::Path};
@@ -79,7 +80,9 @@ pub enum TypeRef {
79 Path(Path), 80 Path(Path),
80 RawPtr(Box<TypeRef>, Mutability), 81 RawPtr(Box<TypeRef>, Mutability),
81 Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability), 82 Reference(Box<TypeRef>, Option<LifetimeRef>, Mutability),
82 Array(Box<TypeRef> /*, Expr*/), 83 // FIXME: for full const generics, the latter element (length) here is going to have to be an
84 // expression that is further lowered later in hir_ty.
85 Array(Box<TypeRef>, ConstScalar),
83 Slice(Box<TypeRef>), 86 Slice(Box<TypeRef>),
84 /// A fn pointer. Last element of the vector is the return type. 87 /// A fn pointer. Last element of the vector is the return type.
85 Fn(Vec<TypeRef>, bool /*varargs*/), 88 Fn(Vec<TypeRef>, bool /*varargs*/),
@@ -140,7 +143,16 @@ impl TypeRef {
140 TypeRef::RawPtr(Box::new(inner_ty), mutability) 143 TypeRef::RawPtr(Box::new(inner_ty), mutability)
141 } 144 }
142 ast::Type::ArrayType(inner) => { 145 ast::Type::ArrayType(inner) => {
143 TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) 146 // FIXME: This is a hack. We should probably reuse the machinery of
147 // `hir_def::body::lower` to lower this into an `Expr` and then evaluate it at the
148 // `hir_ty` level, which would allow knowing the type of:
149 // let v: [u8; 2 + 2] = [0u8; 4];
150 let len = inner
151 .expr()
152 .map(ConstScalar::usize_from_literal_expr)
153 .unwrap_or(ConstScalar::Unknown);
154
155 TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len)
144 } 156 }
145 ast::Type::SliceType(inner) => { 157 ast::Type::SliceType(inner) => {
146 TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty()))) 158 TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())))
@@ -212,7 +224,7 @@ impl TypeRef {
212 } 224 }
213 TypeRef::RawPtr(type_ref, _) 225 TypeRef::RawPtr(type_ref, _)
214 | TypeRef::Reference(type_ref, ..) 226 | TypeRef::Reference(type_ref, ..)
215 | TypeRef::Array(type_ref) 227 | TypeRef::Array(type_ref, _)
216 | TypeRef::Slice(type_ref) => go(&type_ref, f), 228 | TypeRef::Slice(type_ref) => go(&type_ref, f),
217 TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { 229 TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
218 for bound in bounds { 230 for bound in bounds {
@@ -298,3 +310,58 @@ impl TypeBound {
298 } 310 }
299 } 311 }
300} 312}
313
314/// A concrete constant value
315#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
316pub enum ConstScalar {
317 // for now, we only support the trivial case of constant evaluating the length of an array
318 // Note that this is u64 because the target usize may be bigger than our usize
319 Usize(u64),
320
321 /// Case of an unknown value that rustc might know but we don't
322 // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
323 // constants
324 // https://github.com/rust-analyzer/rust-analyzer/pull/8813#issuecomment-840679177
325 // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
326 Unknown,
327}
328
329impl std::fmt::Display for ConstScalar {
330 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
331 match self {
332 ConstScalar::Usize(us) => write!(fmt, "{}", us),
333 ConstScalar::Unknown => write!(fmt, "_"),
334 }
335 }
336}
337
338impl ConstScalar {
339 /// Gets a target usize out of the ConstScalar
340 pub fn as_usize(&self) -> Option<u64> {
341 match self {
342 &ConstScalar::Usize(us) => Some(us),
343 _ => None,
344 }
345 }
346
347 // FIXME: as per the comments on `TypeRef::Array`, this evaluation should not happen at this
348 // parse stage.
349 fn usize_from_literal_expr(expr: ast::Expr) -> ConstScalar {
350 match expr {
351 ast::Expr::Literal(lit) => {
352 let lkind = lit.kind();
353 match lkind {
354 ast::LiteralKind::IntNumber(num)
355 if num.suffix() == None || num.suffix() == Some("usize") =>
356 {
357 num.value().and_then(|v| v.try_into().ok())
358 }
359 _ => None,
360 }
361 }
362 _ => None,
363 }
364 .map(ConstScalar::Usize)
365 .unwrap_or(ConstScalar::Unknown)
366 }
367}
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index af9802144..280c25f11 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -118,6 +118,7 @@ register_builtin! {
118 EAGER: 118 EAGER:
119 (compile_error, CompileError) => compile_error_expand, 119 (compile_error, CompileError) => compile_error_expand,
120 (concat, Concat) => concat_expand, 120 (concat, Concat) => concat_expand,
121 (concat_idents, ConcatIdents) => concat_idents_expand,
121 (include, Include) => include_expand, 122 (include, Include) => include_expand,
122 (include_bytes, IncludeBytes) => include_bytes_expand, 123 (include_bytes, IncludeBytes) => include_bytes_expand,
123 (include_str, IncludeStr) => include_str_expand, 124 (include_str, IncludeStr) => include_str_expand,
@@ -373,6 +374,28 @@ fn concat_expand(
373 ExpandResult { value: Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr)), err } 374 ExpandResult { value: Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr)), err }
374} 375}
375 376
377fn concat_idents_expand(
378 _db: &dyn AstDatabase,
379 _arg_id: EagerMacroId,
380 tt: &tt::Subtree,
381) -> ExpandResult<Option<ExpandedEager>> {
382 let mut err = None;
383 let mut ident = String::new();
384 for (i, t) in tt.token_trees.iter().enumerate() {
385 match t {
386 tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => {
387 ident.push_str(id.text.as_str());
388 }
389 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
390 _ => {
391 err.get_or_insert(mbe::ExpandError::UnexpectedToken);
392 }
393 }
394 }
395 let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() };
396 ExpandResult { value: Some(ExpandedEager::new(quote!(#ident), FragmentKind::Expr)), err }
397}
398
376fn relative_file( 399fn relative_file(
377 db: &dyn AstDatabase, 400 db: &dyn AstDatabase,
378 call_id: MacroCallId, 401 call_id: MacroCallId,
@@ -794,4 +817,16 @@ mod tests {
794 expect![[r#""foor0bar\nfalse""#]], 817 expect![[r#""foor0bar\nfalse""#]],
795 ); 818 );
796 } 819 }
820
821 #[test]
822 fn test_concat_idents_expand() {
823 check_expansion(
824 r##"
825 #[rustc_builtin_macro]
826 macro_rules! concat_idents {}
827 concat_idents!(foo, bar);
828 "##,
829 expect![[r#"foobar"#]],
830 );
831 }
797} 832}
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index bcfd3e524..5a5dc9afd 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -212,6 +212,7 @@ pub mod known {
212 std_panic, 212 std_panic,
213 stringify, 213 stringify,
214 concat, 214 concat,
215 concat_idents,
215 include, 216 include,
216 include_bytes, 217 include_bytes,
217 include_str, 218 include_str,
diff --git a/crates/hir_ty/src/consteval.rs b/crates/hir_ty/src/consteval.rs
new file mode 100644
index 000000000..e3ceb3d62
--- /dev/null
+++ b/crates/hir_ty/src/consteval.rs
@@ -0,0 +1,56 @@
1//! Constant evaluation details
2
3use std::convert::TryInto;
4
5use hir_def::{
6 builtin_type::BuiltinUint,
7 expr::{Expr, Literal},
8 type_ref::ConstScalar,
9};
10
11use crate::{Const, ConstData, ConstValue, Interner, TyKind};
12
13/// Extension trait for [`Const`]
14pub trait ConstExt {
15 /// Is a [`Const`] unknown?
16 fn is_unknown(&self) -> bool;
17}
18
19impl ConstExt for Const {
20 fn is_unknown(&self) -> bool {
21 match self.data(&Interner).value {
22 // interned Unknown
23 chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
24 interned: ConstScalar::Unknown,
25 }) => true,
26
27 // interned concrete anything else
28 chalk_ir::ConstValue::Concrete(..) => false,
29
30 _ => {
31 log::error!("is_unknown was called on a non-concrete constant value! {:?}", self);
32 true
33 }
34 }
35 }
36}
37
38// FIXME: support more than just evaluating literals
39pub fn eval_usize(expr: &Expr) -> Option<u64> {
40 match expr {
41 Expr::Literal(Literal::Uint(v, None))
42 | Expr::Literal(Literal::Uint(v, Some(BuiltinUint::Usize))) => (*v).try_into().ok(),
43 _ => None,
44 }
45}
46
47/// Interns a possibly-unknown target usize
48pub fn usize_const(value: Option<u64>) -> Const {
49 ConstData {
50 ty: TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(&Interner),
51 value: ConstValue::Concrete(chalk_ir::ConcreteConst {
52 interned: value.map(|value| ConstScalar::Usize(value)).unwrap_or(ConstScalar::Unknown),
53 }),
54 }
55 .intern(&Interner)
56}
diff --git a/crates/hir_ty/src/consts.rs b/crates/hir_ty/src/consts.rs
deleted file mode 100644
index 0044b1cff..000000000
--- a/crates/hir_ty/src/consts.rs
+++ /dev/null
@@ -1,21 +0,0 @@
1//! Handling of concrete const values
2
3/// A concrete constant value
4#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
5pub enum ConstScalar {
6 // for now, we only support the trivial case of constant evaluating the length of an array
7 // Note that this is u64 because the target usize may be bigger than our usize
8 Usize(u64),
9
10 /// Case of an unknown value that rustc might know but we don't
11 Unknown,
12}
13
14impl std::fmt::Display for ConstScalar {
15 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
16 match self {
17 ConstScalar::Usize(us) => write!(fmt, "{}", us),
18 ConstScalar::Unknown => write!(fmt, "_"),
19 }
20 }
21}
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index e9762622f..6ee0529c6 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -1119,6 +1119,7 @@ fn main() {
1119 (true, false, true) => (), 1119 (true, false, true) => (),
1120 (true) => (), 1120 (true) => (),
1121 } 1121 }
1122 match (true, false) { (true,) => {} }
1122 match (0) { () => () } 1123 match (0) { () => () }
1123 match Unresolved::Bar { Unresolved::Baz => () } 1124 match Unresolved::Bar { Unresolved::Baz => () }
1124} 1125}
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs
index 8a4296697..7bbd1a1f7 100644
--- a/crates/hir_ty/src/display.rs
+++ b/crates/hir_ty/src/display.rs
@@ -962,11 +962,10 @@ impl HirDisplay for TypeRef {
962 write!(f, "{}", mutability)?; 962 write!(f, "{}", mutability)?;
963 inner.hir_fmt(f)?; 963 inner.hir_fmt(f)?;
964 } 964 }
965 TypeRef::Array(inner) => { 965 TypeRef::Array(inner, len) => {
966 write!(f, "[")?; 966 write!(f, "[")?;
967 inner.hir_fmt(f)?; 967 inner.hir_fmt(f)?;
968 // FIXME: Array length? 968 write!(f, "; {}]", len)?;
969 write!(f, "; _]")?;
970 } 969 }
971 TypeRef::Slice(inner) => { 970 TypeRef::Slice(inner) => {
972 write!(f, "[")?; 971 write!(f, "[")?;
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index 2178ffd07..b6b5a1b75 100644
--- a/crates/hir_ty/src/infer/expr.rs
+++ b/crates/hir_ty/src/infer/expr.rs
@@ -3,7 +3,7 @@
3use std::iter::{repeat, repeat_with}; 3use std::iter::{repeat, repeat_with};
4use std::{mem, sync::Arc}; 4use std::{mem, sync::Arc};
5 5
6use chalk_ir::{cast::Cast, fold::Shift, ConstData, Mutability, TyVariableKind}; 6use chalk_ir::{cast::Cast, fold::Shift, Mutability, TyVariableKind};
7use hir_def::{ 7use hir_def::{
8 expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, 8 expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
9 path::{GenericArg, GenericArgs}, 9 path::{GenericArg, GenericArgs},
@@ -15,9 +15,7 @@ use stdx::always;
15use syntax::ast::RangeOp; 15use syntax::ast::RangeOp;
16 16
17use crate::{ 17use crate::{
18 autoderef, 18 autoderef, consteval,
19 consts::ConstScalar,
20 dummy_usize_const,
21 lower::lower_to_chalk_mutability, 19 lower::lower_to_chalk_mutability,
22 mapping::from_chalk, 20 mapping::from_chalk,
23 method_resolution, op, 21 method_resolution, op,
@@ -25,7 +23,7 @@ use crate::{
25 static_lifetime, to_chalk_trait_id, 23 static_lifetime, to_chalk_trait_id,
26 traits::FnTrait, 24 traits::FnTrait,
27 utils::{generics, Generics}, 25 utils::{generics, Generics},
28 AdtId, Binders, CallableDefId, ConstValue, FnPointer, FnSig, FnSubst, InEnvironment, Interner, 26 AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, InEnvironment, Interner,
29 ProjectionTyExt, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, 27 ProjectionTyExt, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
30}; 28};
31 29
@@ -724,7 +722,7 @@ impl<'a> InferenceContext<'a> {
724 for expr in items.iter() { 722 for expr in items.iter() {
725 self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); 723 self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone()));
726 } 724 }
727 Some(items.len()) 725 Some(items.len() as u64)
728 } 726 }
729 Array::Repeat { initializer, repeat } => { 727 Array::Repeat { initializer, repeat } => {
730 self.infer_expr_coerce( 728 self.infer_expr_coerce(
@@ -737,20 +735,13 @@ impl<'a> InferenceContext<'a> {
737 TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner), 735 TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner),
738 ), 736 ),
739 ); 737 );
740 // FIXME: support length for Repeat array expressions 738
741 None 739 let repeat_expr = &self.body.exprs[*repeat];
740 consteval::eval_usize(repeat_expr)
742 } 741 }
743 }; 742 };
744 743
745 let cd = ConstData { 744 TyKind::Array(elem_ty, consteval::usize_const(len)).intern(&Interner)
746 ty: TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(&Interner),
747 value: ConstValue::Concrete(chalk_ir::ConcreteConst {
748 interned: len
749 .map(|len| ConstScalar::Usize(len as u64))
750 .unwrap_or(ConstScalar::Unknown),
751 }),
752 };
753 TyKind::Array(elem_ty, cd.intern(&Interner)).intern(&Interner)
754 } 745 }
755 Expr::Literal(lit) => match lit { 746 Expr::Literal(lit) => match lit {
756 Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(&Interner), 747 Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(&Interner),
@@ -758,11 +749,12 @@ impl<'a> InferenceContext<'a> {
758 TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(&Interner)) 749 TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(&Interner))
759 .intern(&Interner) 750 .intern(&Interner)
760 } 751 }
761 Literal::ByteString(..) => { 752 Literal::ByteString(bs) => {
762 let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(&Interner); 753 let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(&Interner);
763 754
764 let array_type = 755 let len = consteval::usize_const(Some(bs.len() as u64));
765 TyKind::Array(byte_type, dummy_usize_const()).intern(&Interner); 756
757 let array_type = TyKind::Array(byte_type, len).intern(&Interner);
766 TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(&Interner) 758 TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(&Interner)
767 } 759 }
768 Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(&Interner), 760 Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(&Interner),
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs
index aea354cde..60b94a642 100644
--- a/crates/hir_ty/src/infer/pat.rs
+++ b/crates/hir_ty/src/infer/pat.rs
@@ -126,11 +126,12 @@ impl<'a> InferenceContext<'a> {
126 _ => &[], 126 _ => &[],
127 }; 127 };
128 128
129 let (pre, post) = match ellipsis { 129 let ((pre, post), n_uncovered_patterns) = match ellipsis {
130 Some(idx) => args.split_at(idx), 130 Some(idx) => {
131 None => (&args[..], &[][..]), 131 (args.split_at(idx), expectations.len().saturating_sub(args.len()))
132 }
133 None => ((&args[..], &[][..]), 0),
132 }; 134 };
133 let n_uncovered_patterns = expectations.len().saturating_sub(args.len());
134 let err_ty = self.err_ty(); 135 let err_ty = self.err_ty();
135 let mut expectations_iter = 136 let mut expectations_iter =
136 expectations.iter().map(|a| a.assert_ty_ref(&Interner)).chain(repeat(&err_ty)); 137 expectations.iter().map(|a| a.assert_ty_ref(&Interner)).chain(repeat(&err_ty));
diff --git a/crates/hir_ty/src/interner.rs b/crates/hir_ty/src/interner.rs
index 4cbc9cd4f..7b4119747 100644
--- a/crates/hir_ty/src/interner.rs
+++ b/crates/hir_ty/src/interner.rs
@@ -1,11 +1,12 @@
1//! Implementation of the Chalk `Interner` trait, which allows customizing the 1//! Implementation of the Chalk `Interner` trait, which allows customizing the
2//! representation of the various objects Chalk deals with (types, goals etc.). 2//! representation of the various objects Chalk deals with (types, goals etc.).
3 3
4use crate::{chalk_db, consts::ConstScalar, tls, GenericArg}; 4use crate::{chalk_db, tls, GenericArg};
5use base_db::salsa::InternId; 5use base_db::salsa::InternId;
6use chalk_ir::{Goal, GoalData}; 6use chalk_ir::{Goal, GoalData};
7use hir_def::{ 7use hir_def::{
8 intern::{impl_internable, InternStorage, Internable, Interned}, 8 intern::{impl_internable, InternStorage, Internable, Interned},
9 type_ref::ConstScalar,
9 TypeAliasId, 10 TypeAliasId,
10}; 11};
11use smallvec::SmallVec; 12use smallvec::SmallVec;
diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs
index d23eff513..15b61bedc 100644
--- a/crates/hir_ty/src/lib.rs
+++ b/crates/hir_ty/src/lib.rs
@@ -10,9 +10,9 @@ mod autoderef;
10mod builder; 10mod builder;
11mod chalk_db; 11mod chalk_db;
12mod chalk_ext; 12mod chalk_ext;
13pub mod consteval;
13mod infer; 14mod infer;
14mod interner; 15mod interner;
15mod consts;
16mod lower; 16mod lower;
17mod mapping; 17mod mapping;
18mod op; 18mod op;
@@ -38,9 +38,13 @@ use chalk_ir::{
38 interner::HasInterner, 38 interner::HasInterner,
39 UintTy, 39 UintTy,
40}; 40};
41use hir_def::{expr::ExprId, type_ref::Rawness, TypeParamId}; 41use hir_def::{
42 expr::ExprId,
43 type_ref::{ConstScalar, Rawness},
44 TypeParamId,
45};
42 46
43use crate::{consts::ConstScalar, db::HirDatabase, display::HirDisplay, utils::generics}; 47use crate::{db::HirDatabase, display::HirDisplay, utils::generics};
44 48
45pub use autoderef::autoderef; 49pub use autoderef::autoderef;
46pub use builder::TyBuilder; 50pub use builder::TyBuilder;
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index 9751b45e4..bd8bb6028 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -29,8 +29,8 @@ use stdx::impl_from;
29use syntax::ast; 29use syntax::ast;
30 30
31use crate::{ 31use crate::{
32 consteval,
32 db::HirDatabase, 33 db::HirDatabase,
33 dummy_usize_const,
34 mapping::ToChalk, 34 mapping::ToChalk,
35 static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, 35 static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
36 utils::{ 36 utils::{
@@ -172,11 +172,12 @@ impl<'a> TyLoweringContext<'a> {
172 let inner_ty = self.lower_ty(inner); 172 let inner_ty = self.lower_ty(inner);
173 TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(&Interner) 173 TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(&Interner)
174 } 174 }
175 TypeRef::Array(inner) => { 175 TypeRef::Array(inner, len) => {
176 let inner_ty = self.lower_ty(inner); 176 let inner_ty = self.lower_ty(inner);
177 // FIXME: we don't have length info here because we don't store an expression for 177
178 // the length 178 let const_len = consteval::usize_const(len.as_usize());
179 TyKind::Array(inner_ty, dummy_usize_const()).intern(&Interner) 179
180 TyKind::Array(inner_ty, const_len).intern(&Interner)
180 } 181 }
181 TypeRef::Slice(inner) => { 182 TypeRef::Slice(inner) => {
182 let inner_ty = self.lower_ty(inner); 183 let inner_ty = self.lower_ty(inner);
diff --git a/crates/hir_ty/src/tests/coercion.rs b/crates/hir_ty/src/tests/coercion.rs
index aad3d610e..190471069 100644
--- a/crates/hir_ty/src/tests/coercion.rs
+++ b/crates/hir_ty/src/tests/coercion.rs
@@ -64,42 +64,42 @@ fn coerce_places() {
64 81..92 '{ loop {} }': T 64 81..92 '{ loop {} }': T
65 83..90 'loop {}': ! 65 83..90 'loop {}': !
66 88..90 '{}': () 66 88..90 '{}': ()
67 121..132 '{ loop {} }': *mut [T; _] 67 121..132 '{ loop {} }': *mut [T; 2]
68 123..130 'loop {}': ! 68 123..130 'loop {}': !
69 128..130 '{}': () 69 128..130 '{}': ()
70 159..172 '{ gen() }': *mut [U] 70 159..172 '{ gen() }': *mut [U]
71 165..168 'gen': fn gen<U>() -> *mut [U; _] 71 165..168 'gen': fn gen<U>() -> *mut [U; 2]
72 165..170 'gen()': *mut [U; _] 72 165..170 'gen()': *mut [U; 2]
73 185..419 '{ ...rr); }': () 73 185..419 '{ ...rr); }': ()
74 195..198 'arr': &[u8; _] 74 195..198 'arr': &[u8; 1]
75 211..215 '&[1]': &[u8; 1] 75 211..215 '&[1]': &[u8; 1]
76 212..215 '[1]': [u8; 1] 76 212..215 '[1]': [u8; 1]
77 213..214 '1': u8 77 213..214 '1': u8
78 226..227 'a': &[u8] 78 226..227 'a': &[u8]
79 236..239 'arr': &[u8; _] 79 236..239 'arr': &[u8; 1]
80 249..250 'b': u8 80 249..250 'b': u8
81 253..254 'f': fn f<u8>(&[u8]) -> u8 81 253..254 'f': fn f<u8>(&[u8]) -> u8
82 253..259 'f(arr)': u8 82 253..259 'f(arr)': u8
83 255..258 'arr': &[u8; _] 83 255..258 'arr': &[u8; 1]
84 269..270 'c': &[u8] 84 269..270 'c': &[u8]
85 279..286 '{ arr }': &[u8] 85 279..286 '{ arr }': &[u8]
86 281..284 'arr': &[u8; _] 86 281..284 'arr': &[u8; 1]
87 296..297 'd': u8 87 296..297 'd': u8
88 300..301 'g': fn g<u8>(S<&[u8]>) -> u8 88 300..301 'g': fn g<u8>(S<&[u8]>) -> u8
89 300..315 'g(S { a: arr })': u8 89 300..315 'g(S { a: arr })': u8
90 302..314 'S { a: arr }': S<&[u8]> 90 302..314 'S { a: arr }': S<&[u8]>
91 309..312 'arr': &[u8; _] 91 309..312 'arr': &[u8; 1]
92 325..326 'e': [&[u8]; _] 92 325..326 'e': [&[u8]; 1]
93 340..345 '[arr]': [&[u8]; 1] 93 340..345 '[arr]': [&[u8]; 1]
94 341..344 'arr': &[u8; _] 94 341..344 'arr': &[u8; 1]
95 355..356 'f': [&[u8]; _] 95 355..356 'f': [&[u8]; 2]
96 370..378 '[arr; 2]': [&[u8]; _] 96 370..378 '[arr; 2]': [&[u8]; 2]
97 371..374 'arr': &[u8; _] 97 371..374 'arr': &[u8; 1]
98 376..377 '2': usize 98 376..377 '2': usize
99 388..389 'g': (&[u8], &[u8]) 99 388..389 'g': (&[u8], &[u8])
100 406..416 '(arr, arr)': (&[u8], &[u8]) 100 406..416 '(arr, arr)': (&[u8], &[u8])
101 407..410 'arr': &[u8; _] 101 407..410 'arr': &[u8; 1]
102 412..415 'arr': &[u8; _] 102 412..415 'arr': &[u8; 1]
103 "#]], 103 "#]],
104 ); 104 );
105} 105}
@@ -159,7 +159,7 @@ fn infer_custom_coerce_unsized() {
159 impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} 159 impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
160 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} 160 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
161 "#, 161 "#,
162 expect![[r" 162 expect![[r#"
163 257..258 'x': A<[T]> 163 257..258 'x': A<[T]>
164 278..283 '{ x }': A<[T]> 164 278..283 '{ x }': A<[T]>
165 280..281 'x': A<[T]> 165 280..281 'x': A<[T]>
@@ -169,23 +169,23 @@ fn infer_custom_coerce_unsized() {
169 333..334 'x': C<[T]> 169 333..334 'x': C<[T]>
170 354..359 '{ x }': C<[T]> 170 354..359 '{ x }': C<[T]>
171 356..357 'x': C<[T]> 171 356..357 'x': C<[T]>
172 369..370 'a': A<[u8; _]> 172 369..370 'a': A<[u8; 2]>
173 384..385 'b': B<[u8; _]> 173 384..385 'b': B<[u8; 2]>
174 399..400 'c': C<[u8; _]> 174 399..400 'c': C<[u8; 2]>
175 414..480 '{ ...(c); }': () 175 414..480 '{ ...(c); }': ()
176 424..425 'd': A<[{unknown}]> 176 424..425 'd': A<[{unknown}]>
177 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]> 177 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
178 428..435 'foo1(a)': A<[{unknown}]> 178 428..435 'foo1(a)': A<[{unknown}]>
179 433..434 'a': A<[u8; _]> 179 433..434 'a': A<[u8; 2]>
180 445..446 'e': B<[u8]> 180 445..446 'e': B<[u8]>
181 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]> 181 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
182 449..456 'foo2(b)': B<[u8]> 182 449..456 'foo2(b)': B<[u8]>
183 454..455 'b': B<[u8; _]> 183 454..455 'b': B<[u8; 2]>
184 466..467 'f': C<[u8]> 184 466..467 'f': C<[u8]>
185 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]> 185 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
186 470..477 'foo3(c)': C<[u8]> 186 470..477 'foo3(c)': C<[u8]>
187 475..476 'c': C<[u8; _]> 187 475..476 'c': C<[u8; 2]>
188 "]], 188 "#]],
189 ); 189 );
190} 190}
191 191
diff --git a/crates/hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs
index 33305f208..787647e9f 100644
--- a/crates/hir_ty/src/tests/patterns.rs
+++ b/crates/hir_ty/src/tests/patterns.rs
@@ -345,19 +345,19 @@ fn infer_pattern_match_arr() {
345 "#, 345 "#,
346 expect![[r#" 346 expect![[r#"
347 10..179 '{ ... } }': () 347 10..179 '{ ... } }': ()
348 20..23 'arr': [f64; _] 348 20..23 'arr': [f64; 2]
349 36..46 '[0.0, 1.0]': [f64; 2] 349 36..46 '[0.0, 1.0]': [f64; 2]
350 37..40 '0.0': f64 350 37..40 '0.0': f64
351 42..45 '1.0': f64 351 42..45 '1.0': f64
352 52..177 'match ... }': () 352 52..177 'match ... }': ()
353 58..61 'arr': [f64; _] 353 58..61 'arr': [f64; 2]
354 72..80 '[1.0, a]': [f64; _] 354 72..80 '[1.0, a]': [f64; 2]
355 73..76 '1.0': f64 355 73..76 '1.0': f64
356 73..76 '1.0': f64 356 73..76 '1.0': f64
357 78..79 'a': f64 357 78..79 'a': f64
358 84..110 '{ ... }': () 358 84..110 '{ ... }': ()
359 98..99 'a': f64 359 98..99 'a': f64
360 120..126 '[b, c]': [f64; _] 360 120..126 '[b, c]': [f64; 2]
361 121..122 'b': f64 361 121..122 'b': f64
362 124..125 'c': f64 362 124..125 'c': f64
363 130..171 '{ ... }': () 363 130..171 '{ ... }': ()
@@ -732,7 +732,7 @@ fn foo(tuple: (u8, i16, f32)) {
732 111..112 'a': u8 732 111..112 'a': u8
733 114..115 'b': i16 733 114..115 'b': i16
734 124..126 '{}': () 734 124..126 '{}': ()
735 136..142 '(a, b)': (u8, i16, f32) 735 136..142 '(a, b)': (u8, i16)
736 137..138 'a': u8 736 137..138 'a': u8
737 140..141 'b': i16 737 140..141 'b': i16
738 146..161 '{/*too short*/}': () 738 146..161 '{/*too short*/}': ()
diff --git a/crates/hir_ty/src/tests/simple.rs b/crates/hir_ty/src/tests/simple.rs
index 8b09f2e4a..a9cd42186 100644
--- a/crates/hir_ty/src/tests/simple.rs
+++ b/crates/hir_ty/src/tests/simple.rs
@@ -488,23 +488,34 @@ fn infer_literals() {
488 mod foo {} 488 mod foo {}
489 "#; 489 "#;
490 br#"yolo"#; 490 br#"yolo"#;
491 let a = b"a\x20b\
492 c";
493 let b = br"g\
494h";
495 let c = br#"x"\"yb"#;
491 } 496 }
492 "##, 497 "##,
493 expect![[r##" 498 expect![[r##"
494 10..216 '{ ...o"#; }': () 499 18..478 '{ ... }': ()
495 16..20 '5i32': i32 500 32..36 '5i32': i32
496 26..30 '5f32': f32 501 50..54 '5f32': f32
497 36..40 '5f64': f64 502 68..72 '5f64': f64
498 46..53 '"hello"': &str 503 86..93 '"hello"': &str
499 59..67 'b"bytes"': &[u8; _] 504 107..115 'b"bytes"': &[u8; 5]
500 73..76 ''c'': char 505 129..132 ''c'': char
501 82..86 'b'b'': u8 506 146..150 'b'b'': u8
502 92..96 '3.14': f64 507 164..168 '3.14': f64
503 102..106 '5000': i32 508 182..186 '5000': i32
504 112..117 'false': bool 509 200..205 'false': bool
505 123..127 'true': bool 510 219..223 'true': bool
506 133..197 'r#" ... "#': &str 511 237..333 'r#" ... "#': &str
507 203..213 'br#"yolo"#': &[u8; _] 512 347..357 'br#"yolo"#': &[u8; 4]
513 375..376 'a': &[u8; 4]
514 379..403 'b"a\x2... c"': &[u8; 4]
515 421..422 'b': &[u8; 4]
516 425..433 'br"g\ h"': &[u8; 4]
517 451..452 'c': &[u8; 6]
518 455..467 'br#"x"\"yb"#': &[u8; 6]
508 "##]], 519 "##]],
509 ); 520 );
510} 521}
@@ -1260,12 +1271,14 @@ fn infer_array() {
1260 1271
1261 let b = [a, ["b"]]; 1272 let b = [a, ["b"]];
1262 let x: [u8; 0] = []; 1273 let x: [u8; 0] = [];
1274 // FIXME: requires const evaluation/taking type from rhs somehow
1275 let y: [u8; 2+2] = [1,2,3,4];
1263 } 1276 }
1264 "#, 1277 "#,
1265 expect![[r#" 1278 expect![[r#"
1266 8..9 'x': &str 1279 8..9 'x': &str
1267 17..18 'y': isize 1280 17..18 'y': isize
1268 27..292 '{ ... []; }': () 1281 27..395 '{ ...,4]; }': ()
1269 37..38 'a': [&str; 1] 1282 37..38 'a': [&str; 1]
1270 41..44 '[x]': [&str; 1] 1283 41..44 '[x]': [&str; 1]
1271 42..43 'x': &str 1284 42..43 'x': &str
@@ -1313,8 +1326,14 @@ fn infer_array() {
1313 255..256 'a': [&str; 1] 1326 255..256 'a': [&str; 1]
1314 258..263 '["b"]': [&str; 1] 1327 258..263 '["b"]': [&str; 1]
1315 259..262 '"b"': &str 1328 259..262 '"b"': &str
1316 274..275 'x': [u8; _] 1329 274..275 'x': [u8; 0]
1317 287..289 '[]': [u8; 0] 1330 287..289 '[]': [u8; 0]
1331 368..369 'y': [u8; _]
1332 383..392 '[1,2,3,4]': [u8; 4]
1333 384..385 '1': u8
1334 386..387 '2': u8
1335 388..389 '3': u8
1336 390..391 '4': u8
1318 "#]], 1337 "#]],
1319 ); 1338 );
1320} 1339}
@@ -2409,38 +2428,38 @@ fn infer_operator_overload() {
2409 320..422 '{ ... }': V2 2428 320..422 '{ ... }': V2
2410 334..335 'x': f32 2429 334..335 'x': f32
2411 338..342 'self': V2 2430 338..342 'self': V2
2412 338..344 'self.0': [f32; _] 2431 338..344 'self.0': [f32; 2]
2413 338..347 'self.0[0]': {unknown} 2432 338..347 'self.0[0]': {unknown}
2414 338..358 'self.0...s.0[0]': f32 2433 338..358 'self.0...s.0[0]': f32
2415 345..346 '0': i32 2434 345..346 '0': i32
2416 350..353 'rhs': V2 2435 350..353 'rhs': V2
2417 350..355 'rhs.0': [f32; _] 2436 350..355 'rhs.0': [f32; 2]
2418 350..358 'rhs.0[0]': {unknown} 2437 350..358 'rhs.0[0]': {unknown}
2419 356..357 '0': i32 2438 356..357 '0': i32
2420 372..373 'y': f32 2439 372..373 'y': f32
2421 376..380 'self': V2 2440 376..380 'self': V2
2422 376..382 'self.0': [f32; _] 2441 376..382 'self.0': [f32; 2]
2423 376..385 'self.0[1]': {unknown} 2442 376..385 'self.0[1]': {unknown}
2424 376..396 'self.0...s.0[1]': f32 2443 376..396 'self.0...s.0[1]': f32
2425 383..384 '1': i32 2444 383..384 '1': i32
2426 388..391 'rhs': V2 2445 388..391 'rhs': V2
2427 388..393 'rhs.0': [f32; _] 2446 388..393 'rhs.0': [f32; 2]
2428 388..396 'rhs.0[1]': {unknown} 2447 388..396 'rhs.0[1]': {unknown}
2429 394..395 '1': i32 2448 394..395 '1': i32
2430 406..408 'V2': V2([f32; _]) -> V2 2449 406..408 'V2': V2([f32; 2]) -> V2
2431 406..416 'V2([x, y])': V2 2450 406..416 'V2([x, y])': V2
2432 409..415 '[x, y]': [f32; 2] 2451 409..415 '[x, y]': [f32; 2]
2433 410..411 'x': f32 2452 410..411 'x': f32
2434 413..414 'y': f32 2453 413..414 'y': f32
2435 436..519 '{ ... vb; }': () 2454 436..519 '{ ... vb; }': ()
2436 446..448 'va': V2 2455 446..448 'va': V2
2437 451..453 'V2': V2([f32; _]) -> V2 2456 451..453 'V2': V2([f32; 2]) -> V2
2438 451..465 'V2([0.0, 1.0])': V2 2457 451..465 'V2([0.0, 1.0])': V2
2439 454..464 '[0.0, 1.0]': [f32; 2] 2458 454..464 '[0.0, 1.0]': [f32; 2]
2440 455..458 '0.0': f32 2459 455..458 '0.0': f32
2441 460..463 '1.0': f32 2460 460..463 '1.0': f32
2442 475..477 'vb': V2 2461 475..477 'vb': V2
2443 480..482 'V2': V2([f32; _]) -> V2 2462 480..482 'V2': V2([f32; 2]) -> V2
2444 480..494 'V2([0.0, 1.0])': V2 2463 480..494 'V2([0.0, 1.0])': V2
2445 483..493 '[0.0, 1.0]': [f32; 2] 2464 483..493 '[0.0, 1.0]': [f32; 2]
2446 484..487 '0.0': f32 2465 484..487 '0.0': f32
diff --git a/crates/hir_ty/src/tests/traits.rs b/crates/hir_ty/src/tests/traits.rs
index 47a1455fd..f80cf9879 100644
--- a/crates/hir_ty/src/tests/traits.rs
+++ b/crates/hir_ty/src/tests/traits.rs
@@ -3474,3 +3474,100 @@ fn main(){
3474 "#]], 3474 "#]],
3475 ) 3475 )
3476} 3476}
3477
3478#[test]
3479fn array_length() {
3480 check_infer(
3481 r#"
3482trait T {
3483 type Output;
3484 fn do_thing(&self) -> Self::Output;
3485}
3486
3487impl T for [u8; 4] {
3488 type Output = usize;
3489 fn do_thing(&self) -> Self::Output {
3490 2
3491 }
3492}
3493
3494impl T for [u8; 2] {
3495 type Output = u8;
3496 fn do_thing(&self) -> Self::Output {
3497 2
3498 }
3499}
3500
3501fn main() {
3502 let v = [0u8; 2];
3503 let v2 = v.do_thing();
3504 let v3 = [0u8; 4];
3505 let v4 = v3.do_thing();
3506}
3507"#,
3508 expect![[r#"
3509 44..48 'self': &Self
3510 133..137 'self': &[u8; 4]
3511 155..172 '{ ... }': usize
3512 165..166 '2': usize
3513 236..240 'self': &[u8; 2]
3514 258..275 '{ ... }': u8
3515 268..269 '2': u8
3516 289..392 '{ ...g(); }': ()
3517 299..300 'v': [u8; 2]
3518 303..311 '[0u8; 2]': [u8; 2]
3519 304..307 '0u8': u8
3520 309..310 '2': usize
3521 321..323 'v2': u8
3522 326..327 'v': [u8; 2]
3523 326..338 'v.do_thing()': u8
3524 348..350 'v3': [u8; 4]
3525 353..361 '[0u8; 4]': [u8; 4]
3526 354..357 '0u8': u8
3527 359..360 '4': usize
3528 371..373 'v4': usize
3529 376..378 'v3': [u8; 4]
3530 376..389 'v3.do_thing()': usize
3531 "#]],
3532 )
3533}
3534
3535// FIXME: We should infer the length of the returned array :)
3536#[test]
3537fn const_generics() {
3538 check_infer(
3539 r#"
3540trait T {
3541 type Output;
3542 fn do_thing(&self) -> Self::Output;
3543}
3544
3545impl<const L: usize> T for [u8; L] {
3546 type Output = [u8; L];
3547 fn do_thing(&self) -> Self::Output {
3548 *self
3549 }
3550}
3551
3552fn main() {
3553 let v = [0u8; 2];
3554 let v2 = v.do_thing();
3555}
3556"#,
3557 expect![[r#"
3558 44..48 'self': &Self
3559 151..155 'self': &[u8; _]
3560 173..194 '{ ... }': [u8; _]
3561 183..188 '*self': [u8; _]
3562 184..188 'self': &[u8; _]
3563 208..260 '{ ...g(); }': ()
3564 218..219 'v': [u8; 2]
3565 222..230 '[0u8; 2]': [u8; 2]
3566 223..226 '0u8': u8
3567 228..229 '2': usize
3568 240..242 'v2': [u8; _]
3569 245..246 'v': [u8; 2]
3570 245..257 'v.do_thing()': [u8; _]
3571 "#]],
3572 )
3573}
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 273d8cfbb..27d347dbd 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -28,7 +28,7 @@ use unlinked_file::UnlinkedFile;
28 28
29use crate::{Assist, AssistId, AssistKind, FileId, Label, SourceChange}; 29use crate::{Assist, AssistId, AssistKind, FileId, Label, SourceChange};
30 30
31use self::fixes::DiagnosticWithFix; 31use self::fixes::DiagnosticWithFixes;
32 32
33#[derive(Debug)] 33#[derive(Debug)]
34pub struct Diagnostic { 34pub struct Diagnostic {
@@ -36,14 +36,14 @@ pub struct Diagnostic {
36 pub message: String, 36 pub message: String,
37 pub range: TextRange, 37 pub range: TextRange,
38 pub severity: Severity, 38 pub severity: Severity,
39 pub fix: Option<Assist>, 39 pub fixes: Option<Vec<Assist>>,
40 pub unused: bool, 40 pub unused: bool,
41 pub code: Option<DiagnosticCode>, 41 pub code: Option<DiagnosticCode>,
42} 42}
43 43
44impl Diagnostic { 44impl Diagnostic {
45 fn error(range: TextRange, message: String) -> Self { 45 fn error(range: TextRange, message: String) -> Self {
46 Self { message, range, severity: Severity::Error, fix: None, unused: false, code: None } 46 Self { message, range, severity: Severity::Error, fixes: None, unused: false, code: None }
47 } 47 }
48 48
49 fn hint(range: TextRange, message: String) -> Self { 49 fn hint(range: TextRange, message: String) -> Self {
@@ -51,14 +51,14 @@ impl Diagnostic {
51 message, 51 message,
52 range, 52 range,
53 severity: Severity::WeakWarning, 53 severity: Severity::WeakWarning,
54 fix: None, 54 fixes: None,
55 unused: false, 55 unused: false,
56 code: None, 56 code: None,
57 } 57 }
58 } 58 }
59 59
60 fn with_fix(self, fix: Option<Assist>) -> Self { 60 fn with_fixes(self, fixes: Option<Vec<Assist>>) -> Self {
61 Self { fix, ..self } 61 Self { fixes, ..self }
62 } 62 }
63 63
64 fn with_unused(self, unused: bool) -> Self { 64 fn with_unused(self, unused: bool) -> Self {
@@ -154,7 +154,7 @@ pub(crate) fn diagnostics(
154 // Override severity and mark as unused. 154 // Override severity and mark as unused.
155 res.borrow_mut().push( 155 res.borrow_mut().push(
156 Diagnostic::hint(range, d.message()) 156 Diagnostic::hint(range, d.message())
157 .with_fix(d.fix(&sema, resolve)) 157 .with_fixes(d.fixes(&sema, resolve))
158 .with_code(Some(d.code())), 158 .with_code(Some(d.code())),
159 ); 159 );
160 }) 160 })
@@ -210,23 +210,23 @@ pub(crate) fn diagnostics(
210 res.into_inner() 210 res.into_inner()
211} 211}
212 212
213fn diagnostic_with_fix<D: DiagnosticWithFix>( 213fn diagnostic_with_fix<D: DiagnosticWithFixes>(
214 d: &D, 214 d: &D,
215 sema: &Semantics<RootDatabase>, 215 sema: &Semantics<RootDatabase>,
216 resolve: &AssistResolveStrategy, 216 resolve: &AssistResolveStrategy,
217) -> Diagnostic { 217) -> Diagnostic {
218 Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message()) 218 Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message())
219 .with_fix(d.fix(&sema, resolve)) 219 .with_fixes(d.fixes(&sema, resolve))
220 .with_code(Some(d.code())) 220 .with_code(Some(d.code()))
221} 221}
222 222
223fn warning_with_fix<D: DiagnosticWithFix>( 223fn warning_with_fix<D: DiagnosticWithFixes>(
224 d: &D, 224 d: &D,
225 sema: &Semantics<RootDatabase>, 225 sema: &Semantics<RootDatabase>,
226 resolve: &AssistResolveStrategy, 226 resolve: &AssistResolveStrategy,
227) -> Diagnostic { 227) -> Diagnostic {
228 Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message()) 228 Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message())
229 .with_fix(d.fix(&sema, resolve)) 229 .with_fixes(d.fixes(&sema, resolve))
230 .with_code(Some(d.code())) 230 .with_code(Some(d.code()))
231} 231}
232 232
@@ -256,12 +256,12 @@ fn check_unnecessary_braces_in_use_statement(
256 256
257 acc.push( 257 acc.push(
258 Diagnostic::hint(use_range, "Unnecessary braces in use statement".to_string()) 258 Diagnostic::hint(use_range, "Unnecessary braces in use statement".to_string())
259 .with_fix(Some(fix( 259 .with_fixes(Some(vec![fix(
260 "remove_braces", 260 "remove_braces",
261 "Remove unnecessary braces", 261 "Remove unnecessary braces",
262 SourceChange::from_text_edit(file_id, edit), 262 SourceChange::from_text_edit(file_id, edit),
263 use_range, 263 use_range,
264 ))), 264 )])),
265 ); 265 );
266 } 266 }
267 267
@@ -309,9 +309,23 @@ mod tests {
309 /// Takes a multi-file input fixture with annotated cursor positions, 309 /// Takes a multi-file input fixture with annotated cursor positions,
310 /// and checks that: 310 /// and checks that:
311 /// * a diagnostic is produced 311 /// * a diagnostic is produced
312 /// * this diagnostic fix trigger range touches the input cursor position 312 /// * the first diagnostic fix trigger range touches the input cursor position
313 /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied 313 /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
314 pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) { 314 pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
315 check_nth_fix(0, ra_fixture_before, ra_fixture_after);
316 }
317 /// Takes a multi-file input fixture with annotated cursor positions,
318 /// and checks that:
319 /// * a diagnostic is produced
 320 /// * every diagnostic fix's trigger range touches the input cursor position
321 /// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
322 pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
323 for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
324 check_nth_fix(i, ra_fixture_before, ra_fixture_after)
325 }
326 }
327
328 fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
315 let after = trim_indent(ra_fixture_after); 329 let after = trim_indent(ra_fixture_after);
316 330
317 let (analysis, file_position) = fixture::position(ra_fixture_before); 331 let (analysis, file_position) = fixture::position(ra_fixture_before);
@@ -324,9 +338,9 @@ mod tests {
324 .unwrap() 338 .unwrap()
325 .pop() 339 .pop()
326 .unwrap(); 340 .unwrap();
327 let fix = diagnostic.fix.unwrap(); 341 let fix = &diagnostic.fixes.unwrap()[nth];
328 let actual = { 342 let actual = {
329 let source_change = fix.source_change.unwrap(); 343 let source_change = fix.source_change.as_ref().unwrap();
330 let file_id = *source_change.source_file_edits.keys().next().unwrap(); 344 let file_id = *source_change.source_file_edits.keys().next().unwrap();
331 let mut actual = analysis.file_text(file_id).unwrap().to_string(); 345 let mut actual = analysis.file_text(file_id).unwrap().to_string();
332 346
@@ -344,7 +358,6 @@ mod tests {
344 file_position.offset 358 file_position.offset
345 ); 359 );
346 } 360 }
347
348 /// Checks that there's a diagnostic *without* fix at `$0`. 361 /// Checks that there's a diagnostic *without* fix at `$0`.
349 fn check_no_fix(ra_fixture: &str) { 362 fn check_no_fix(ra_fixture: &str) {
350 let (analysis, file_position) = fixture::position(ra_fixture); 363 let (analysis, file_position) = fixture::position(ra_fixture);
@@ -357,7 +370,7 @@ mod tests {
357 .unwrap() 370 .unwrap()
358 .pop() 371 .pop()
359 .unwrap(); 372 .unwrap();
360 assert!(diagnostic.fix.is_none(), "got a fix when none was expected: {:?}", diagnostic); 373 assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
361 } 374 }
362 375
363 /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics 376 /// Takes a multi-file input fixture with annotated cursor position and checks that no diagnostics
@@ -375,7 +388,7 @@ mod tests {
375 assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics); 388 assert_eq!(diagnostics.len(), 0, "unexpected diagnostics:\n{:#?}", diagnostics);
376 } 389 }
377 390
378 fn check_expect(ra_fixture: &str, expect: Expect) { 391 pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
379 let (analysis, file_id) = fixture::file(ra_fixture); 392 let (analysis, file_id) = fixture::file(ra_fixture);
380 let diagnostics = analysis 393 let diagnostics = analysis
381 .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id) 394 .diagnostics(&DiagnosticsConfig::default(), AssistResolveStrategy::All, file_id)
@@ -384,374 +397,6 @@ mod tests {
384 } 397 }
385 398
386 #[test] 399 #[test]
387 fn test_wrap_return_type_option() {
388 check_fix(
389 r#"
390//- /main.rs crate:main deps:core
391use core::option::Option::{self, Some, None};
392
393fn div(x: i32, y: i32) -> Option<i32> {
394 if y == 0 {
395 return None;
396 }
397 x / y$0
398}
399//- /core/lib.rs crate:core
400pub mod result {
401 pub enum Result<T, E> { Ok(T), Err(E) }
402}
403pub mod option {
404 pub enum Option<T> { Some(T), None }
405}
406"#,
407 r#"
408use core::option::Option::{self, Some, None};
409
410fn div(x: i32, y: i32) -> Option<i32> {
411 if y == 0 {
412 return None;
413 }
414 Some(x / y)
415}
416"#,
417 );
418 }
419
420 #[test]
421 fn test_wrap_return_type() {
422 check_fix(
423 r#"
424//- /main.rs crate:main deps:core
425use core::result::Result::{self, Ok, Err};
426
427fn div(x: i32, y: i32) -> Result<i32, ()> {
428 if y == 0 {
429 return Err(());
430 }
431 x / y$0
432}
433//- /core/lib.rs crate:core
434pub mod result {
435 pub enum Result<T, E> { Ok(T), Err(E) }
436}
437pub mod option {
438 pub enum Option<T> { Some(T), None }
439}
440"#,
441 r#"
442use core::result::Result::{self, Ok, Err};
443
444fn div(x: i32, y: i32) -> Result<i32, ()> {
445 if y == 0 {
446 return Err(());
447 }
448 Ok(x / y)
449}
450"#,
451 );
452 }
453
454 #[test]
455 fn test_wrap_return_type_handles_generic_functions() {
456 check_fix(
457 r#"
458//- /main.rs crate:main deps:core
459use core::result::Result::{self, Ok, Err};
460
461fn div<T>(x: T) -> Result<T, i32> {
462 if x == 0 {
463 return Err(7);
464 }
465 $0x
466}
467//- /core/lib.rs crate:core
468pub mod result {
469 pub enum Result<T, E> { Ok(T), Err(E) }
470}
471pub mod option {
472 pub enum Option<T> { Some(T), None }
473}
474"#,
475 r#"
476use core::result::Result::{self, Ok, Err};
477
478fn div<T>(x: T) -> Result<T, i32> {
479 if x == 0 {
480 return Err(7);
481 }
482 Ok(x)
483}
484"#,
485 );
486 }
487
488 #[test]
489 fn test_wrap_return_type_handles_type_aliases() {
490 check_fix(
491 r#"
492//- /main.rs crate:main deps:core
493use core::result::Result::{self, Ok, Err};
494
495type MyResult<T> = Result<T, ()>;
496
497fn div(x: i32, y: i32) -> MyResult<i32> {
498 if y == 0 {
499 return Err(());
500 }
501 x $0/ y
502}
503//- /core/lib.rs crate:core
504pub mod result {
505 pub enum Result<T, E> { Ok(T), Err(E) }
506}
507pub mod option {
508 pub enum Option<T> { Some(T), None }
509}
510"#,
511 r#"
512use core::result::Result::{self, Ok, Err};
513
514type MyResult<T> = Result<T, ()>;
515
516fn div(x: i32, y: i32) -> MyResult<i32> {
517 if y == 0 {
518 return Err(());
519 }
520 Ok(x / y)
521}
522"#,
523 );
524 }
525
526 #[test]
527 fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() {
528 check_no_diagnostics(
529 r#"
530//- /main.rs crate:main deps:core
531use core::result::Result::{self, Ok, Err};
532
533fn foo() -> Result<(), i32> { 0 }
534
535//- /core/lib.rs crate:core
536pub mod result {
537 pub enum Result<T, E> { Ok(T), Err(E) }
538}
539pub mod option {
540 pub enum Option<T> { Some(T), None }
541}
542"#,
543 );
544 }
545
546 #[test]
547 fn test_wrap_return_type_not_applicable_when_return_type_is_not_result_or_option() {
548 check_no_diagnostics(
549 r#"
550//- /main.rs crate:main deps:core
551use core::result::Result::{self, Ok, Err};
552
553enum SomeOtherEnum { Ok(i32), Err(String) }
554
555fn foo() -> SomeOtherEnum { 0 }
556
557//- /core/lib.rs crate:core
558pub mod result {
559 pub enum Result<T, E> { Ok(T), Err(E) }
560}
561pub mod option {
562 pub enum Option<T> { Some(T), None }
563}
564"#,
565 );
566 }
567
568 #[test]
569 fn test_fill_struct_fields_empty() {
570 check_fix(
571 r#"
572struct TestStruct { one: i32, two: i64 }
573
574fn test_fn() {
575 let s = TestStruct {$0};
576}
577"#,
578 r#"
579struct TestStruct { one: i32, two: i64 }
580
581fn test_fn() {
582 let s = TestStruct { one: (), two: ()};
583}
584"#,
585 );
586 }
587
588 #[test]
589 fn test_fill_struct_fields_self() {
590 check_fix(
591 r#"
592struct TestStruct { one: i32 }
593
594impl TestStruct {
595 fn test_fn() { let s = Self {$0}; }
596}
597"#,
598 r#"
599struct TestStruct { one: i32 }
600
601impl TestStruct {
602 fn test_fn() { let s = Self { one: ()}; }
603}
604"#,
605 );
606 }
607
608 #[test]
609 fn test_fill_struct_fields_enum() {
610 check_fix(
611 r#"
612enum Expr {
613 Bin { lhs: Box<Expr>, rhs: Box<Expr> }
614}
615
616impl Expr {
617 fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
618 Expr::Bin {$0 }
619 }
620}
621"#,
622 r#"
623enum Expr {
624 Bin { lhs: Box<Expr>, rhs: Box<Expr> }
625}
626
627impl Expr {
628 fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
629 Expr::Bin { lhs: (), rhs: () }
630 }
631}
632"#,
633 );
634 }
635
636 #[test]
637 fn test_fill_struct_fields_partial() {
638 check_fix(
639 r#"
640struct TestStruct { one: i32, two: i64 }
641
642fn test_fn() {
643 let s = TestStruct{ two: 2$0 };
644}
645"#,
646 r"
647struct TestStruct { one: i32, two: i64 }
648
649fn test_fn() {
650 let s = TestStruct{ two: 2, one: () };
651}
652",
653 );
654 }
655
656 #[test]
657 fn test_fill_struct_fields_raw_ident() {
658 check_fix(
659 r#"
660struct TestStruct { r#type: u8 }
661
662fn test_fn() {
663 TestStruct { $0 };
664}
665"#,
666 r"
667struct TestStruct { r#type: u8 }
668
669fn test_fn() {
670 TestStruct { r#type: () };
671}
672",
673 );
674 }
675
676 #[test]
677 fn test_fill_struct_fields_no_diagnostic() {
678 check_no_diagnostics(
679 r"
680 struct TestStruct { one: i32, two: i64 }
681
682 fn test_fn() {
683 let one = 1;
684 let s = TestStruct{ one, two: 2 };
685 }
686 ",
687 );
688 }
689
690 #[test]
691 fn test_fill_struct_fields_no_diagnostic_on_spread() {
692 check_no_diagnostics(
693 r"
694 struct TestStruct { one: i32, two: i64 }
695
696 fn test_fn() {
697 let one = 1;
698 let s = TestStruct{ ..a };
699 }
700 ",
701 );
702 }
703
704 #[test]
705 fn test_unresolved_module_diagnostic() {
706 check_expect(
707 r#"mod foo;"#,
708 expect![[r#"
709 [
710 Diagnostic {
711 message: "unresolved module",
712 range: 0..8,
713 severity: Error,
714 fix: Some(
715 Assist {
716 id: AssistId(
717 "create_module",
718 QuickFix,
719 ),
720 label: "Create module",
721 group: None,
722 target: 0..8,
723 source_change: Some(
724 SourceChange {
725 source_file_edits: {},
726 file_system_edits: [
727 CreateFile {
728 dst: AnchoredPathBuf {
729 anchor: FileId(
730 0,
731 ),
732 path: "foo.rs",
733 },
734 initial_contents: "",
735 },
736 ],
737 is_snippet: false,
738 },
739 ),
740 },
741 ),
742 unused: false,
743 code: Some(
744 DiagnosticCode(
745 "unresolved-module",
746 ),
747 ),
748 },
749 ]
750 "#]],
751 );
752 }
753
754 #[test]
755 fn test_unresolved_macro_range() { 400 fn test_unresolved_macro_range() {
756 check_expect( 401 check_expect(
757 r#"foo::bar!(92);"#, 402 r#"foo::bar!(92);"#,
@@ -761,7 +406,7 @@ fn test_fn() {
761 message: "unresolved macro `foo::bar!`", 406 message: "unresolved macro `foo::bar!`",
762 range: 5..8, 407 range: 5..8,
763 severity: Error, 408 severity: Error,
764 fix: None, 409 fixes: None,
765 unused: false, 410 unused: false,
766 code: Some( 411 code: Some(
767 DiagnosticCode( 412 DiagnosticCode(
@@ -792,7 +437,7 @@ fn main() {
792pub struct Foo { pub a: i32, pub b: i32 } 437pub struct Foo { pub a: i32, pub b: i32 }
793"#, 438"#,
794 r#" 439 r#"
795fn some(, b: ()) {} 440fn some(, b: () ) {}
796fn items() {} 441fn items() {}
797fn here() {} 442fn here() {}
798 443
@@ -891,53 +536,6 @@ mod a {
891 } 536 }
892 537
893 #[test] 538 #[test]
894 fn test_add_field_from_usage() {
895 check_fix(
896 r"
897fn main() {
898 Foo { bar: 3, baz$0: false};
899}
900struct Foo {
901 bar: i32
902}
903",
904 r"
905fn main() {
906 Foo { bar: 3, baz: false};
907}
908struct Foo {
909 bar: i32,
910 baz: bool
911}
912",
913 )
914 }
915
916 #[test]
917 fn test_add_field_in_other_file_from_usage() {
918 check_fix(
919 r#"
920//- /main.rs
921mod foo;
922
923fn main() {
924 foo::Foo { bar: 3, $0baz: false};
925}
926//- /foo.rs
927struct Foo {
928 bar: i32
929}
930"#,
931 r#"
932struct Foo {
933 bar: i32,
934 pub(crate) baz: bool
935}
936"#,
937 )
938 }
939
940 #[test]
941 fn test_disabled_diagnostics() { 539 fn test_disabled_diagnostics() {
942 let mut config = DiagnosticsConfig::default(); 540 let mut config = DiagnosticsConfig::default();
943 config.disabled.insert("unresolved-module".into()); 541 config.disabled.insert("unresolved-module".into());
@@ -955,134 +553,28 @@ struct Foo {
955 } 553 }
956 554
957 #[test] 555 #[test]
958 fn test_rename_incorrect_case() {
959 check_fix(
960 r#"
961pub struct test_struct$0 { one: i32 }
962
963pub fn some_fn(val: test_struct) -> test_struct {
964 test_struct { one: val.one + 1 }
965}
966"#,
967 r#"
968pub struct TestStruct { one: i32 }
969
970pub fn some_fn(val: TestStruct) -> TestStruct {
971 TestStruct { one: val.one + 1 }
972}
973"#,
974 );
975
976 check_fix(
977 r#"
978pub fn some_fn(NonSnakeCase$0: u8) -> u8 {
979 NonSnakeCase
980}
981"#,
982 r#"
983pub fn some_fn(non_snake_case: u8) -> u8 {
984 non_snake_case
985}
986"#,
987 );
988
989 check_fix(
990 r#"
991pub fn SomeFn$0(val: u8) -> u8 {
992 if val != 0 { SomeFn(val - 1) } else { val }
993}
994"#,
995 r#"
996pub fn some_fn(val: u8) -> u8 {
997 if val != 0 { some_fn(val - 1) } else { val }
998}
999"#,
1000 );
1001
1002 check_fix(
1003 r#"
1004fn some_fn() {
1005 let whatAWeird_Formatting$0 = 10;
1006 another_func(whatAWeird_Formatting);
1007}
1008"#,
1009 r#"
1010fn some_fn() {
1011 let what_a_weird_formatting = 10;
1012 another_func(what_a_weird_formatting);
1013}
1014"#,
1015 );
1016 }
1017
1018 #[test]
1019 fn test_uppercase_const_no_diagnostics() {
1020 check_no_diagnostics(
1021 r#"
1022fn foo() {
1023 const ANOTHER_ITEM$0: &str = "some_item";
1024}
1025"#,
1026 );
1027 }
1028
1029 #[test]
1030 fn test_rename_incorrect_case_struct_method() {
1031 check_fix(
1032 r#"
1033pub struct TestStruct;
1034
1035impl TestStruct {
1036 pub fn SomeFn$0() -> TestStruct {
1037 TestStruct
1038 }
1039}
1040"#,
1041 r#"
1042pub struct TestStruct;
1043
1044impl TestStruct {
1045 pub fn some_fn() -> TestStruct {
1046 TestStruct
1047 }
1048}
1049"#,
1050 );
1051 }
1052
1053 #[test]
1054 fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() {
1055 let input = r#"fn FOO$0() {}"#;
1056 let expected = r#"fn foo() {}"#;
1057
1058 let (analysis, file_position) = fixture::position(input);
1059 let diagnostics = analysis
1060 .diagnostics(
1061 &DiagnosticsConfig::default(),
1062 AssistResolveStrategy::All,
1063 file_position.file_id,
1064 )
1065 .unwrap();
1066 assert_eq!(diagnostics.len(), 1);
1067
1068 check_fix(input, expected);
1069 }
1070
1071 #[test]
1072 fn unlinked_file_prepend_first_item() { 556 fn unlinked_file_prepend_first_item() {
1073 cov_mark::check!(unlinked_file_prepend_before_first_item); 557 cov_mark::check!(unlinked_file_prepend_before_first_item);
 1074 check_fix( 558 // Only the first test checks the `pub mod` fix since the rest behave the same
559 check_fixes(
1075 r#" 560 r#"
1076//- /main.rs 561//- /main.rs
1077fn f() {} 562fn f() {}
1078//- /foo.rs 563//- /foo.rs
1079$0 564$0
1080"#, 565"#,
1081 r#" 566 vec![
567 r#"
1082mod foo; 568mod foo;
1083 569
1084fn f() {} 570fn f() {}
1085"#, 571"#,
572 r#"
573pub mod foo;
574
575fn f() {}
576"#,
577 ],
1086 ); 578 );
1087 } 579 }
1088 580
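
Note: since `Diagnostic::fix` (an `Option<Assist>`) becomes `Diagnostic::fixes` (an `Option<Vec<Assist>>`), callers of the `ide` crate now receive zero or more quick fixes per diagnostic. A minimal sketch of how a client might adapt; `analysis`, `config`, `code_actions` and `to_code_action` are assumed surrounding context, not part of this commit:

    for diagnostic in analysis.diagnostics(&config, AssistResolveStrategy::All, file_id)? {
        // `fixes` is now an Option<Vec<Assist>>: iterate and flatten it so that
        // every Assist becomes its own quick-fix action instead of at most one.
        for fix in diagnostic.fixes.iter().flatten() {
            code_actions.push(to_code_action(fix));
        }
    }
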
diff --git a/crates/ide/src/diagnostics/field_shorthand.rs b/crates/ide/src/diagnostics/field_shorthand.rs
index 2b1787f9b..01bd2dba6 100644
--- a/crates/ide/src/diagnostics/field_shorthand.rs
+++ b/crates/ide/src/diagnostics/field_shorthand.rs
@@ -46,14 +46,13 @@ fn check_expr_field_shorthand(
46 46
47 let field_range = record_field.syntax().text_range(); 47 let field_range = record_field.syntax().text_range();
48 acc.push( 48 acc.push(
49 Diagnostic::hint(field_range, "Shorthand struct initialization".to_string()).with_fix( 49 Diagnostic::hint(field_range, "Shorthand struct initialization".to_string())
50 Some(fix( 50 .with_fixes(Some(vec![fix(
51 "use_expr_field_shorthand", 51 "use_expr_field_shorthand",
52 "Use struct shorthand initialization", 52 "Use struct shorthand initialization",
53 SourceChange::from_text_edit(file_id, edit), 53 SourceChange::from_text_edit(file_id, edit),
54 field_range, 54 field_range,
55 )), 55 )])),
56 ),
57 ); 56 );
58 } 57 }
59} 58}
@@ -86,13 +85,13 @@ fn check_pat_field_shorthand(
86 let edit = edit_builder.finish(); 85 let edit = edit_builder.finish();
87 86
88 let field_range = record_pat_field.syntax().text_range(); 87 let field_range = record_pat_field.syntax().text_range();
89 acc.push(Diagnostic::hint(field_range, "Shorthand struct pattern".to_string()).with_fix( 88 acc.push(Diagnostic::hint(field_range, "Shorthand struct pattern".to_string()).with_fixes(
90 Some(fix( 89 Some(vec![fix(
91 "use_pat_field_shorthand", 90 "use_pat_field_shorthand",
92 "Use struct field shorthand", 91 "Use struct field shorthand",
93 SourceChange::from_text_edit(file_id, edit), 92 SourceChange::from_text_edit(file_id, edit),
94 field_range, 93 field_range,
95 )), 94 )]),
96 )); 95 ));
97 } 96 }
98} 97}
diff --git a/crates/ide/src/diagnostics/fixes.rs b/crates/ide/src/diagnostics/fixes.rs
index 15821500f..258ac6974 100644
--- a/crates/ide/src/diagnostics/fixes.rs
+++ b/crates/ide/src/diagnostics/fixes.rs
@@ -1,297 +1,31 @@
1//! Provides a way to attach fixes to the diagnostics. 1//! Provides a way to attach fixes to the diagnostics.
 2//! The same module also has all current custom fixes for the diagnostics implemented. 2//! The same module also has all current custom fixes for the diagnostics implemented.
3use hir::{ 3mod change_case;
4 db::AstDatabase, 4mod create_field;
5 diagnostics::{ 5mod fill_missing_fields;
6 Diagnostic, IncorrectCase, MissingFields, MissingOkOrSomeInTailExpr, NoSuchField, 6mod remove_semicolon;
7 RemoveThisSemicolon, ReplaceFilterMapNextWithFindMap, UnresolvedModule, 7mod replace_with_find_map;
8 }, 8mod unresolved_module;
9 HasSource, HirDisplay, InFile, Semantics, VariantDef, 9mod wrap_tail_expr;
10}; 10
11use hir::{diagnostics::Diagnostic, Semantics};
11use ide_assists::AssistResolveStrategy; 12use ide_assists::AssistResolveStrategy;
12use ide_db::{ 13use ide_db::RootDatabase;
13 base_db::{AnchoredPathBuf, FileId},
14 source_change::{FileSystemEdit, SourceChange},
15 RootDatabase,
16};
17use syntax::{
18 algo,
19 ast::{self, edit::IndentLevel, make, ArgListOwner},
20 AstNode, TextRange,
21};
22use text_edit::TextEdit;
23 14
24use crate::{ 15use crate::Assist;
25 diagnostics::{fix, unresolved_fix},
26 references::rename::rename_with_semantics,
27 Assist, FilePosition,
28};
29 16
30/// A [Diagnostic] that potentially has a fix available. 17/// A [Diagnostic] that potentially has some fixes available.
31/// 18///
32/// [Diagnostic]: hir::diagnostics::Diagnostic 19/// [Diagnostic]: hir::diagnostics::Diagnostic
33pub(crate) trait DiagnosticWithFix: Diagnostic { 20pub(crate) trait DiagnosticWithFixes: Diagnostic {
34 /// `resolve` determines if the diagnostic should fill in the `edit` field 21 /// `resolve` determines if the diagnostic should fill in the `edit` field
35 /// of the assist. 22 /// of the assist.
36 /// 23 ///
37 /// If `resolve` is false, the edit will be computed later, on demand, and 24 /// If `resolve` is false, the edit will be computed later, on demand, and
38 /// can be omitted. 25 /// can be omitted.
39 fn fix( 26 fn fixes(
40 &self, 27 &self,
41 sema: &Semantics<RootDatabase>, 28 sema: &Semantics<RootDatabase>,
42 _resolve: &AssistResolveStrategy, 29 _resolve: &AssistResolveStrategy,
43 ) -> Option<Assist>; 30 ) -> Option<Vec<Assist>>;
44}
45
46impl DiagnosticWithFix for UnresolvedModule {
47 fn fix(
48 &self,
49 sema: &Semantics<RootDatabase>,
50 _resolve: &AssistResolveStrategy,
51 ) -> Option<Assist> {
52 let root = sema.db.parse_or_expand(self.file)?;
53 let unresolved_module = self.decl.to_node(&root);
54 Some(fix(
55 "create_module",
56 "Create module",
57 FileSystemEdit::CreateFile {
58 dst: AnchoredPathBuf {
59 anchor: self.file.original_file(sema.db),
60 path: self.candidate.clone(),
61 },
62 initial_contents: "".to_string(),
63 }
64 .into(),
65 unresolved_module.syntax().text_range(),
66 ))
67 }
68}
69
70impl DiagnosticWithFix for NoSuchField {
71 fn fix(
72 &self,
73 sema: &Semantics<RootDatabase>,
74 _resolve: &AssistResolveStrategy,
75 ) -> Option<Assist> {
76 let root = sema.db.parse_or_expand(self.file)?;
77 missing_record_expr_field_fix(
78 &sema,
79 self.file.original_file(sema.db),
80 &self.field.to_node(&root),
81 )
82 }
83}
84
85impl DiagnosticWithFix for MissingFields {
86 fn fix(
87 &self,
88 sema: &Semantics<RootDatabase>,
89 _resolve: &AssistResolveStrategy,
90 ) -> Option<Assist> {
91 // Note that although we could add a diagnostics to
92 // fill the missing tuple field, e.g :
93 // `struct A(usize);`
94 // `let a = A { 0: () }`
95 // but it is uncommon usage and it should not be encouraged.
96 if self.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
97 return None;
98 }
99
100 let root = sema.db.parse_or_expand(self.file)?;
101 let field_list_parent = self.field_list_parent.to_node(&root);
102 let old_field_list = field_list_parent.record_expr_field_list()?;
103 let mut new_field_list = old_field_list.clone();
104 for f in self.missed_fields.iter() {
105 let field =
106 make::record_expr_field(make::name_ref(&f.to_string()), Some(make::expr_unit()));
107 new_field_list = new_field_list.append_field(&field);
108 }
109
110 let edit = {
111 let mut builder = TextEdit::builder();
112 algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
113 .into_text_edit(&mut builder);
114 builder.finish()
115 };
116 Some(fix(
117 "fill_missing_fields",
118 "Fill struct fields",
119 SourceChange::from_text_edit(self.file.original_file(sema.db), edit),
120 sema.original_range(&field_list_parent.syntax()).range,
121 ))
122 }
123}
124
125impl DiagnosticWithFix for MissingOkOrSomeInTailExpr {
126 fn fix(
127 &self,
128 sema: &Semantics<RootDatabase>,
129 _resolve: &AssistResolveStrategy,
130 ) -> Option<Assist> {
131 let root = sema.db.parse_or_expand(self.file)?;
132 let tail_expr = self.expr.to_node(&root);
133 let tail_expr_range = tail_expr.syntax().text_range();
134 let replacement = format!("{}({})", self.required, tail_expr.syntax());
135 let edit = TextEdit::replace(tail_expr_range, replacement);
136 let source_change = SourceChange::from_text_edit(self.file.original_file(sema.db), edit);
137 let name = if self.required == "Ok" { "Wrap with Ok" } else { "Wrap with Some" };
138 Some(fix("wrap_tail_expr", name, source_change, tail_expr_range))
139 }
140}
141
142impl DiagnosticWithFix for RemoveThisSemicolon {
143 fn fix(
144 &self,
145 sema: &Semantics<RootDatabase>,
146 _resolve: &AssistResolveStrategy,
147 ) -> Option<Assist> {
148 let root = sema.db.parse_or_expand(self.file)?;
149
150 let semicolon = self
151 .expr
152 .to_node(&root)
153 .syntax()
154 .parent()
155 .and_then(ast::ExprStmt::cast)
156 .and_then(|expr| expr.semicolon_token())?
157 .text_range();
158
159 let edit = TextEdit::delete(semicolon);
160 let source_change = SourceChange::from_text_edit(self.file.original_file(sema.db), edit);
161
162 Some(fix("remove_semicolon", "Remove this semicolon", source_change, semicolon))
163 }
164}
165
166impl DiagnosticWithFix for IncorrectCase {
167 fn fix(
168 &self,
169 sema: &Semantics<RootDatabase>,
170 resolve: &AssistResolveStrategy,
171 ) -> Option<Assist> {
172 let root = sema.db.parse_or_expand(self.file)?;
173 let name_node = self.ident.to_node(&root);
174
175 let name_node = InFile::new(self.file, name_node.syntax());
176 let frange = name_node.original_file_range(sema.db);
177 let file_position = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
178
179 let label = format!("Rename to {}", self.suggested_text);
180 let mut res = unresolved_fix("change_case", &label, frange.range);
181 if resolve.should_resolve(&res.id) {
182 let source_change = rename_with_semantics(sema, file_position, &self.suggested_text);
183 res.source_change = Some(source_change.ok().unwrap_or_default());
184 }
185
186 Some(res)
187 }
188}
189
190impl DiagnosticWithFix for ReplaceFilterMapNextWithFindMap {
191 fn fix(
192 &self,
193 sema: &Semantics<RootDatabase>,
194 _resolve: &AssistResolveStrategy,
195 ) -> Option<Assist> {
196 let root = sema.db.parse_or_expand(self.file)?;
197 let next_expr = self.next_expr.to_node(&root);
198 let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;
199
200 let filter_map_call = ast::MethodCallExpr::cast(next_call.receiver()?.syntax().clone())?;
201 let filter_map_name_range = filter_map_call.name_ref()?.ident_token()?.text_range();
202 let filter_map_args = filter_map_call.arg_list()?;
203
204 let range_to_replace =
205 TextRange::new(filter_map_name_range.start(), next_expr.syntax().text_range().end());
206 let replacement = format!("find_map{}", filter_map_args.syntax().text());
207 let trigger_range = next_expr.syntax().text_range();
208
209 let edit = TextEdit::replace(range_to_replace, replacement);
210
211 let source_change = SourceChange::from_text_edit(self.file.original_file(sema.db), edit);
212
213 Some(fix(
214 "replace_with_find_map",
215 "Replace filter_map(..).next() with find_map()",
216 source_change,
217 trigger_range,
218 ))
219 }
220}
221
222fn missing_record_expr_field_fix(
223 sema: &Semantics<RootDatabase>,
224 usage_file_id: FileId,
225 record_expr_field: &ast::RecordExprField,
226) -> Option<Assist> {
227 let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;
228 let def_id = sema.resolve_variant(record_lit)?;
229 let module;
230 let def_file_id;
231 let record_fields = match def_id {
232 VariantDef::Struct(s) => {
233 module = s.module(sema.db);
234 let source = s.source(sema.db)?;
235 def_file_id = source.file_id;
236 let fields = source.value.field_list()?;
237 record_field_list(fields)?
238 }
239 VariantDef::Union(u) => {
240 module = u.module(sema.db);
241 let source = u.source(sema.db)?;
242 def_file_id = source.file_id;
243 source.value.record_field_list()?
244 }
245 VariantDef::Variant(e) => {
246 module = e.module(sema.db);
247 let source = e.source(sema.db)?;
248 def_file_id = source.file_id;
249 let fields = source.value.field_list()?;
250 record_field_list(fields)?
251 }
252 };
253 let def_file_id = def_file_id.original_file(sema.db);
254
255 let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?;
256 if new_field_type.is_unknown() {
257 return None;
258 }
259 let new_field = make::record_field(
260 None,
261 make::name(&record_expr_field.field_name()?.text()),
262 make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
263 );
264
265 let last_field = record_fields.fields().last()?;
266 let last_field_syntax = last_field.syntax();
267 let indent = IndentLevel::from_node(last_field_syntax);
268
269 let mut new_field = new_field.to_string();
270 if usage_file_id != def_file_id {
271 new_field = format!("pub(crate) {}", new_field);
272 }
273 new_field = format!("\n{}{}", indent, new_field);
274
275 let needs_comma = !last_field_syntax.to_string().ends_with(',');
276 if needs_comma {
277 new_field = format!(",{}", new_field);
278 }
279
280 let source_change = SourceChange::from_text_edit(
281 def_file_id,
282 TextEdit::insert(last_field_syntax.text_range().end(), new_field),
283 );
284 return Some(fix(
285 "create_field",
286 "Create field",
287 source_change,
288 record_expr_field.syntax().text_range(),
289 ));
290
291 fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
292 match field_def_list {
293 ast::FieldList::RecordFieldList(it) => Some(it),
294 ast::FieldList::TupleFieldList(_) => None,
295 }
296 }
297} 31}
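
The renamed trait now hands back all available fixes at once. A stripped-down illustration of the new shape (the `ExampleDiagnostic` type and the `first_fix`/`second_fix` helpers are placeholders, not part of this commit; the real impls live in the per-fix modules added below):

    impl DiagnosticWithFixes for ExampleDiagnostic {
        fn fixes(
            &self,
            _sema: &Semantics<RootDatabase>,
            _resolve: &AssistResolveStrategy,
        ) -> Option<Vec<Assist>> {
            // A diagnostic may now offer several alternative fixes;
            // returning None still means no fix is available.
            Some(vec![first_fix(), second_fix()])
        }
    }
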
diff --git a/crates/ide/src/diagnostics/fixes/change_case.rs b/crates/ide/src/diagnostics/fixes/change_case.rs
new file mode 100644
index 000000000..42be3375f
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/change_case.rs
@@ -0,0 +1,155 @@
1use hir::{db::AstDatabase, diagnostics::IncorrectCase, InFile, Semantics};
2use ide_assists::{Assist, AssistResolveStrategy};
3use ide_db::{base_db::FilePosition, RootDatabase};
4use syntax::AstNode;
5
6use crate::{
7 diagnostics::{unresolved_fix, DiagnosticWithFixes},
8 references::rename::rename_with_semantics,
9};
10
11impl DiagnosticWithFixes for IncorrectCase {
12 fn fixes(
13 &self,
14 sema: &Semantics<RootDatabase>,
15 resolve: &AssistResolveStrategy,
16 ) -> Option<Vec<Assist>> {
17 let root = sema.db.parse_or_expand(self.file)?;
18 let name_node = self.ident.to_node(&root);
19
20 let name_node = InFile::new(self.file, name_node.syntax());
21 let frange = name_node.original_file_range(sema.db);
22 let file_position = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
23
24 let label = format!("Rename to {}", self.suggested_text);
25 let mut res = unresolved_fix("change_case", &label, frange.range);
26 if resolve.should_resolve(&res.id) {
27 let source_change = rename_with_semantics(sema, file_position, &self.suggested_text);
28 res.source_change = Some(source_change.ok().unwrap_or_default());
29 }
30
31 Some(vec![res])
32 }
33}
34
35#[cfg(test)]
36mod change_case {
37 use crate::{
38 diagnostics::tests::{check_fix, check_no_diagnostics},
39 fixture, AssistResolveStrategy, DiagnosticsConfig,
40 };
41
42 #[test]
43 fn test_rename_incorrect_case() {
44 check_fix(
45 r#"
46pub struct test_struct$0 { one: i32 }
47
48pub fn some_fn(val: test_struct) -> test_struct {
49 test_struct { one: val.one + 1 }
50}
51"#,
52 r#"
53pub struct TestStruct { one: i32 }
54
55pub fn some_fn(val: TestStruct) -> TestStruct {
56 TestStruct { one: val.one + 1 }
57}
58"#,
59 );
60
61 check_fix(
62 r#"
63pub fn some_fn(NonSnakeCase$0: u8) -> u8 {
64 NonSnakeCase
65}
66"#,
67 r#"
68pub fn some_fn(non_snake_case: u8) -> u8 {
69 non_snake_case
70}
71"#,
72 );
73
74 check_fix(
75 r#"
76pub fn SomeFn$0(val: u8) -> u8 {
77 if val != 0 { SomeFn(val - 1) } else { val }
78}
79"#,
80 r#"
81pub fn some_fn(val: u8) -> u8 {
82 if val != 0 { some_fn(val - 1) } else { val }
83}
84"#,
85 );
86
87 check_fix(
88 r#"
89fn some_fn() {
90 let whatAWeird_Formatting$0 = 10;
91 another_func(whatAWeird_Formatting);
92}
93"#,
94 r#"
95fn some_fn() {
96 let what_a_weird_formatting = 10;
97 another_func(what_a_weird_formatting);
98}
99"#,
100 );
101 }
102
103 #[test]
104 fn test_uppercase_const_no_diagnostics() {
105 check_no_diagnostics(
106 r#"
107fn foo() {
108 const ANOTHER_ITEM$0: &str = "some_item";
109}
110"#,
111 );
112 }
113
114 #[test]
115 fn test_rename_incorrect_case_struct_method() {
116 check_fix(
117 r#"
118pub struct TestStruct;
119
120impl TestStruct {
121 pub fn SomeFn$0() -> TestStruct {
122 TestStruct
123 }
124}
125"#,
126 r#"
127pub struct TestStruct;
128
129impl TestStruct {
130 pub fn some_fn() -> TestStruct {
131 TestStruct
132 }
133}
134"#,
135 );
136 }
137
138 #[test]
139 fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() {
140 let input = r#"fn FOO$0() {}"#;
141 let expected = r#"fn foo() {}"#;
142
143 let (analysis, file_position) = fixture::position(input);
144 let diagnostics = analysis
145 .diagnostics(
146 &DiagnosticsConfig::default(),
147 AssistResolveStrategy::All,
148 file_position.file_id,
149 )
150 .unwrap();
151 assert_eq!(diagnostics.len(), 1);
152
153 check_fix(input, expected);
154 }
155}
diff --git a/crates/ide/src/diagnostics/fixes/create_field.rs b/crates/ide/src/diagnostics/fixes/create_field.rs
new file mode 100644
index 000000000..a5f457dce
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/create_field.rs
@@ -0,0 +1,156 @@
1use hir::{db::AstDatabase, diagnostics::NoSuchField, HasSource, HirDisplay, Semantics};
2use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
3use syntax::{
4 ast::{self, edit::IndentLevel, make},
5 AstNode,
6};
7use text_edit::TextEdit;
8
9use crate::{
10 diagnostics::{fix, DiagnosticWithFixes},
11 Assist, AssistResolveStrategy,
12};
13impl DiagnosticWithFixes for NoSuchField {
14 fn fixes(
15 &self,
16 sema: &Semantics<RootDatabase>,
17 _resolve: &AssistResolveStrategy,
18 ) -> Option<Vec<Assist>> {
19 let root = sema.db.parse_or_expand(self.file)?;
20 missing_record_expr_field_fixes(
21 &sema,
22 self.file.original_file(sema.db),
23 &self.field.to_node(&root),
24 )
25 }
26}
27
28fn missing_record_expr_field_fixes(
29 sema: &Semantics<RootDatabase>,
30 usage_file_id: FileId,
31 record_expr_field: &ast::RecordExprField,
32) -> Option<Vec<Assist>> {
33 let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;
34 let def_id = sema.resolve_variant(record_lit)?;
35 let module;
36 let def_file_id;
37 let record_fields = match def_id {
38 hir::VariantDef::Struct(s) => {
39 module = s.module(sema.db);
40 let source = s.source(sema.db)?;
41 def_file_id = source.file_id;
42 let fields = source.value.field_list()?;
43 record_field_list(fields)?
44 }
45 hir::VariantDef::Union(u) => {
46 module = u.module(sema.db);
47 let source = u.source(sema.db)?;
48 def_file_id = source.file_id;
49 source.value.record_field_list()?
50 }
51 hir::VariantDef::Variant(e) => {
52 module = e.module(sema.db);
53 let source = e.source(sema.db)?;
54 def_file_id = source.file_id;
55 let fields = source.value.field_list()?;
56 record_field_list(fields)?
57 }
58 };
59 let def_file_id = def_file_id.original_file(sema.db);
60
61 let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?;
62 if new_field_type.is_unknown() {
63 return None;
64 }
65 let new_field = make::record_field(
66 None,
67 make::name(&record_expr_field.field_name()?.text()),
68 make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
69 );
70
71 let last_field = record_fields.fields().last()?;
72 let last_field_syntax = last_field.syntax();
73 let indent = IndentLevel::from_node(last_field_syntax);
74
75 let mut new_field = new_field.to_string();
76 if usage_file_id != def_file_id {
77 new_field = format!("pub(crate) {}", new_field);
78 }
79 new_field = format!("\n{}{}", indent, new_field);
80
81 let needs_comma = !last_field_syntax.to_string().ends_with(',');
82 if needs_comma {
83 new_field = format!(",{}", new_field);
84 }
85
86 let source_change = SourceChange::from_text_edit(
87 def_file_id,
88 TextEdit::insert(last_field_syntax.text_range().end(), new_field),
89 );
90
91 return Some(vec![fix(
92 "create_field",
93 "Create field",
94 source_change,
95 record_expr_field.syntax().text_range(),
96 )]);
97
98 fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
99 match field_def_list {
100 ast::FieldList::RecordFieldList(it) => Some(it),
101 ast::FieldList::TupleFieldList(_) => None,
102 }
103 }
104}
105
106#[cfg(test)]
107mod tests {
108 use crate::diagnostics::tests::check_fix;
109
110 #[test]
111 fn test_add_field_from_usage() {
112 check_fix(
113 r"
114fn main() {
115 Foo { bar: 3, baz$0: false};
116}
117struct Foo {
118 bar: i32
119}
120",
121 r"
122fn main() {
123 Foo { bar: 3, baz: false};
124}
125struct Foo {
126 bar: i32,
127 baz: bool
128}
129",
130 )
131 }
132
133 #[test]
134 fn test_add_field_in_other_file_from_usage() {
135 check_fix(
136 r#"
137//- /main.rs
138mod foo;
139
140fn main() {
141 foo::Foo { bar: 3, $0baz: false};
142}
143//- /foo.rs
144struct Foo {
145 bar: i32
146}
147"#,
148 r#"
149struct Foo {
150 bar: i32,
151 pub(crate) baz: bool
152}
153"#,
154 )
155 }
156}
diff --git a/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs b/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs
new file mode 100644
index 000000000..b5dd64c08
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/fill_missing_fields.rs
@@ -0,0 +1,217 @@
1use hir::{db::AstDatabase, diagnostics::MissingFields, Semantics};
2use ide_assists::AssistResolveStrategy;
3use ide_db::{source_change::SourceChange, RootDatabase};
4use syntax::{algo, ast::make, AstNode};
5use text_edit::TextEdit;
6
7use crate::{
8 diagnostics::{fix, fixes::DiagnosticWithFixes},
9 Assist,
10};
11
12impl DiagnosticWithFixes for MissingFields {
13 fn fixes(
14 &self,
15 sema: &Semantics<RootDatabase>,
16 _resolve: &AssistResolveStrategy,
17 ) -> Option<Vec<Assist>> {
 18 // Note that we could add a diagnostic to
 19 // fill a missing tuple field, e.g.:
 20 // `struct A(usize);`
 21 // `let a = A { 0: () }`
 22 // but this is uncommon usage and should not be encouraged.
23 if self.missed_fields.iter().any(|it| it.as_tuple_index().is_some()) {
24 return None;
25 }
26
27 let root = sema.db.parse_or_expand(self.file)?;
28 let field_list_parent = self.field_list_parent.to_node(&root);
29 let old_field_list = field_list_parent.record_expr_field_list()?;
30 let new_field_list = old_field_list.clone_for_update();
31 for f in self.missed_fields.iter() {
32 let field =
33 make::record_expr_field(make::name_ref(&f.to_string()), Some(make::expr_unit()))
34 .clone_for_update();
35 new_field_list.add_field(field);
36 }
37
38 let edit = {
39 let mut builder = TextEdit::builder();
40 algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
41 .into_text_edit(&mut builder);
42 builder.finish()
43 };
44 Some(vec![fix(
45 "fill_missing_fields",
46 "Fill struct fields",
47 SourceChange::from_text_edit(self.file.original_file(sema.db), edit),
48 sema.original_range(&field_list_parent.syntax()).range,
49 )])
50 }
51}
52
53#[cfg(test)]
54mod tests {
55 use crate::diagnostics::tests::{check_fix, check_no_diagnostics};
56
57 #[test]
58 fn test_fill_struct_fields_empty() {
59 check_fix(
60 r#"
61struct TestStruct { one: i32, two: i64 }
62
63fn test_fn() {
64 let s = TestStruct {$0};
65}
66"#,
67 r#"
68struct TestStruct { one: i32, two: i64 }
69
70fn test_fn() {
71 let s = TestStruct { one: (), two: () };
72}
73"#,
74 );
75 }
76
77 #[test]
78 fn test_fill_struct_fields_self() {
79 check_fix(
80 r#"
81struct TestStruct { one: i32 }
82
83impl TestStruct {
84 fn test_fn() { let s = Self {$0}; }
85}
86"#,
87 r#"
88struct TestStruct { one: i32 }
89
90impl TestStruct {
91 fn test_fn() { let s = Self { one: () }; }
92}
93"#,
94 );
95 }
96
97 #[test]
98 fn test_fill_struct_fields_enum() {
99 check_fix(
100 r#"
101enum Expr {
102 Bin { lhs: Box<Expr>, rhs: Box<Expr> }
103}
104
105impl Expr {
106 fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
107 Expr::Bin {$0 }
108 }
109}
110"#,
111 r#"
112enum Expr {
113 Bin { lhs: Box<Expr>, rhs: Box<Expr> }
114}
115
116impl Expr {
117 fn new_bin(lhs: Box<Expr>, rhs: Box<Expr>) -> Expr {
118 Expr::Bin { lhs: (), rhs: () }
119 }
120}
121"#,
122 );
123 }
124
125 #[test]
126 fn test_fill_struct_fields_partial() {
127 check_fix(
128 r#"
129struct TestStruct { one: i32, two: i64 }
130
131fn test_fn() {
132 let s = TestStruct{ two: 2$0 };
133}
134"#,
135 r"
136struct TestStruct { one: i32, two: i64 }
137
138fn test_fn() {
139 let s = TestStruct{ two: 2, one: () };
140}
141",
142 );
143 }
144
145 #[test]
146 fn test_fill_struct_fields_raw_ident() {
147 check_fix(
148 r#"
149struct TestStruct { r#type: u8 }
150
151fn test_fn() {
152 TestStruct { $0 };
153}
154"#,
155 r"
156struct TestStruct { r#type: u8 }
157
158fn test_fn() {
159 TestStruct { r#type: () };
160}
161",
162 );
163 }
164
165 #[test]
166 fn test_fill_struct_fields_no_diagnostic() {
167 check_no_diagnostics(
168 r#"
169struct TestStruct { one: i32, two: i64 }
170
171fn test_fn() {
172 let one = 1;
173 let s = TestStruct{ one, two: 2 };
174}
175 "#,
176 );
177 }
178
179 #[test]
180 fn test_fill_struct_fields_no_diagnostic_on_spread() {
181 check_no_diagnostics(
182 r#"
183struct TestStruct { one: i32, two: i64 }
184
185fn test_fn() {
186 let one = 1;
187 let s = TestStruct{ ..a };
188}
189"#,
190 );
191 }
192
193 #[test]
194 fn test_fill_struct_fields_blank_line() {
195 check_fix(
196 r#"
197struct S { a: (), b: () }
198
199fn f() {
200 S {
201 $0
202 };
203}
204"#,
205 r#"
206struct S { a: (), b: () }
207
208fn f() {
209 S {
210 a: (),
211 b: (),
212 };
213}
214"#,
215 );
216 }
217}
diff --git a/crates/ide/src/diagnostics/fixes/remove_semicolon.rs b/crates/ide/src/diagnostics/fixes/remove_semicolon.rs
new file mode 100644
index 000000000..f1724d479
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/remove_semicolon.rs
@@ -0,0 +1,41 @@
1use hir::{db::AstDatabase, diagnostics::RemoveThisSemicolon, Semantics};
2use ide_assists::{Assist, AssistResolveStrategy};
3use ide_db::{source_change::SourceChange, RootDatabase};
4use syntax::{ast, AstNode};
5use text_edit::TextEdit;
6
7use crate::diagnostics::{fix, DiagnosticWithFixes};
8
9impl DiagnosticWithFixes for RemoveThisSemicolon {
10 fn fixes(
11 &self,
12 sema: &Semantics<RootDatabase>,
13 _resolve: &AssistResolveStrategy,
14 ) -> Option<Vec<Assist>> {
15 let root = sema.db.parse_or_expand(self.file)?;
16
17 let semicolon = self
18 .expr
19 .to_node(&root)
20 .syntax()
21 .parent()
22 .and_then(ast::ExprStmt::cast)
23 .and_then(|expr| expr.semicolon_token())?
24 .text_range();
25
26 let edit = TextEdit::delete(semicolon);
27 let source_change = SourceChange::from_text_edit(self.file.original_file(sema.db), edit);
28
29 Some(vec![fix("remove_semicolon", "Remove this semicolon", source_change, semicolon)])
30 }
31}
32
33#[cfg(test)]
34mod tests {
35 use crate::diagnostics::tests::check_fix;
36
37 #[test]
38 fn remove_semicolon() {
39 check_fix(r#"fn f() -> i32 { 92$0; }"#, r#"fn f() -> i32 { 92 }"#);
40 }
41}
diff --git a/crates/ide/src/diagnostics/fixes/replace_with_find_map.rs b/crates/ide/src/diagnostics/fixes/replace_with_find_map.rs
new file mode 100644
index 000000000..444bf563b
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/replace_with_find_map.rs
@@ -0,0 +1,84 @@
1use hir::{db::AstDatabase, diagnostics::ReplaceFilterMapNextWithFindMap, Semantics};
2use ide_assists::{Assist, AssistResolveStrategy};
3use ide_db::{source_change::SourceChange, RootDatabase};
4use syntax::{
5 ast::{self, ArgListOwner},
6 AstNode, TextRange,
7};
8use text_edit::TextEdit;
9
10use crate::diagnostics::{fix, DiagnosticWithFixes};
11
12impl DiagnosticWithFixes for ReplaceFilterMapNextWithFindMap {
13 fn fixes(
14 &self,
15 sema: &Semantics<RootDatabase>,
16 _resolve: &AssistResolveStrategy,
17 ) -> Option<Vec<Assist>> {
18 let root = sema.db.parse_or_expand(self.file)?;
19 let next_expr = self.next_expr.to_node(&root);
20 let next_call = ast::MethodCallExpr::cast(next_expr.syntax().clone())?;
21
22 let filter_map_call = ast::MethodCallExpr::cast(next_call.receiver()?.syntax().clone())?;
23 let filter_map_name_range = filter_map_call.name_ref()?.ident_token()?.text_range();
24 let filter_map_args = filter_map_call.arg_list()?;
25
26 let range_to_replace =
27 TextRange::new(filter_map_name_range.start(), next_expr.syntax().text_range().end());
28 let replacement = format!("find_map{}", filter_map_args.syntax().text());
29 let trigger_range = next_expr.syntax().text_range();
30
31 let edit = TextEdit::replace(range_to_replace, replacement);
32
33 let source_change = SourceChange::from_text_edit(self.file.original_file(sema.db), edit);
34
35 Some(vec![fix(
36 "replace_with_find_map",
37 "Replace filter_map(..).next() with find_map()",
38 source_change,
39 trigger_range,
40 )])
41 }
42}
43
44#[cfg(test)]
45mod tests {
46 use crate::diagnostics::tests::check_fix;
47
48 #[test]
 49 fn replace_with_find_map() {
50 check_fix(
51 r#"
52//- /main.rs crate:main deps:core
53use core::iter::Iterator;
54use core::option::Option::{self, Some, None};
55fn foo() {
56 let m = [1, 2, 3].iter().$0filter_map(|x| if *x == 2 { Some (4) } else { None }).next();
57}
58//- /core/lib.rs crate:core
59pub mod option {
60 pub enum Option<T> { Some(T), None }
61}
62pub mod iter {
63 pub trait Iterator {
64 type Item;
65 fn filter_map<B, F>(self, f: F) -> FilterMap where F: FnMut(Self::Item) -> Option<B> { FilterMap }
66 fn next(&mut self) -> Option<Self::Item>;
67 }
68 pub struct FilterMap {}
69 impl Iterator for FilterMap {
70 type Item = i32;
71 fn next(&mut self) -> i32 { 7 }
72 }
73}
74"#,
75 r#"
76use core::iter::Iterator;
77use core::option::Option::{self, Some, None};
78fn foo() {
79 let m = [1, 2, 3].iter().find_map(|x| if *x == 2 { Some (4) } else { None });
80}
81"#,
82 )
83 }
84}
diff --git a/crates/ide/src/diagnostics/fixes/unresolved_module.rs b/crates/ide/src/diagnostics/fixes/unresolved_module.rs
new file mode 100644
index 000000000..b3d0283bb
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/unresolved_module.rs
@@ -0,0 +1,89 @@
1use hir::{db::AstDatabase, diagnostics::UnresolvedModule, Semantics};
2use ide_assists::{Assist, AssistResolveStrategy};
3use ide_db::{base_db::AnchoredPathBuf, source_change::FileSystemEdit, RootDatabase};
4use syntax::AstNode;
5
6use crate::diagnostics::{fix, DiagnosticWithFixes};
7
8impl DiagnosticWithFixes for UnresolvedModule {
9 fn fixes(
10 &self,
11 sema: &Semantics<RootDatabase>,
12 _resolve: &AssistResolveStrategy,
13 ) -> Option<Vec<Assist>> {
14 let root = sema.db.parse_or_expand(self.file)?;
15 let unresolved_module = self.decl.to_node(&root);
16 Some(vec![fix(
17 "create_module",
18 "Create module",
19 FileSystemEdit::CreateFile {
20 dst: AnchoredPathBuf {
21 anchor: self.file.original_file(sema.db),
22 path: self.candidate.clone(),
23 },
24 initial_contents: "".to_string(),
25 }
26 .into(),
27 unresolved_module.syntax().text_range(),
28 )])
29 }
30}
31
32#[cfg(test)]
33mod tests {
34 use expect_test::expect;
35
36 use crate::diagnostics::tests::check_expect;
37
38 #[test]
39 fn test_unresolved_module_diagnostic() {
40 check_expect(
41 r#"mod foo;"#,
42 expect![[r#"
43 [
44 Diagnostic {
45 message: "unresolved module",
46 range: 0..8,
47 severity: Error,
48 fixes: Some(
49 [
50 Assist {
51 id: AssistId(
52 "create_module",
53 QuickFix,
54 ),
55 label: "Create module",
56 group: None,
57 target: 0..8,
58 source_change: Some(
59 SourceChange {
60 source_file_edits: {},
61 file_system_edits: [
62 CreateFile {
63 dst: AnchoredPathBuf {
64 anchor: FileId(
65 0,
66 ),
67 path: "foo.rs",
68 },
69 initial_contents: "",
70 },
71 ],
72 is_snippet: false,
73 },
74 ),
75 },
76 ],
77 ),
78 unused: false,
79 code: Some(
80 DiagnosticCode(
81 "unresolved-module",
82 ),
83 ),
84 },
85 ]
86 "#]],
87 );
88 }
89}
diff --git a/crates/ide/src/diagnostics/fixes/wrap_tail_expr.rs b/crates/ide/src/diagnostics/fixes/wrap_tail_expr.rs
new file mode 100644
index 000000000..715a403b9
--- /dev/null
+++ b/crates/ide/src/diagnostics/fixes/wrap_tail_expr.rs
@@ -0,0 +1,211 @@
1use hir::{db::AstDatabase, diagnostics::MissingOkOrSomeInTailExpr, Semantics};
2use ide_assists::{Assist, AssistResolveStrategy};
3use ide_db::{source_change::SourceChange, RootDatabase};
4use syntax::AstNode;
5use text_edit::TextEdit;
6
7use crate::diagnostics::{fix, DiagnosticWithFixes};
8
9impl DiagnosticWithFixes for MissingOkOrSomeInTailExpr {
10 fn fixes(
11 &self,
12 sema: &Semantics<RootDatabase>,
13 _resolve: &AssistResolveStrategy,
14 ) -> Option<Vec<Assist>> {
15 let root = sema.db.parse_or_expand(self.file)?;
16 let tail_expr = self.expr.to_node(&root);
17 let tail_expr_range = tail_expr.syntax().text_range();
18 let replacement = format!("{}({})", self.required, tail_expr.syntax());
19 let edit = TextEdit::replace(tail_expr_range, replacement);
20 let source_change = SourceChange::from_text_edit(self.file.original_file(sema.db), edit);
21 let name = if self.required == "Ok" { "Wrap with Ok" } else { "Wrap with Some" };
22 Some(vec![fix("wrap_tail_expr", name, source_change, tail_expr_range)])
23 }
24}
25
26#[cfg(test)]
27mod tests {
28 use crate::diagnostics::tests::{check_fix, check_no_diagnostics};
29
30 #[test]
31 fn test_wrap_return_type_option() {
32 check_fix(
33 r#"
34//- /main.rs crate:main deps:core
35use core::option::Option::{self, Some, None};
36
37fn div(x: i32, y: i32) -> Option<i32> {
38 if y == 0 {
39 return None;
40 }
41 x / y$0
42}
43//- /core/lib.rs crate:core
44pub mod result {
45 pub enum Result<T, E> { Ok(T), Err(E) }
46}
47pub mod option {
48 pub enum Option<T> { Some(T), None }
49}
50"#,
51 r#"
52use core::option::Option::{self, Some, None};
53
54fn div(x: i32, y: i32) -> Option<i32> {
55 if y == 0 {
56 return None;
57 }
58 Some(x / y)
59}
60"#,
61 );
62 }
63
64 #[test]
65 fn test_wrap_return_type() {
66 check_fix(
67 r#"
68//- /main.rs crate:main deps:core
69use core::result::Result::{self, Ok, Err};
70
71fn div(x: i32, y: i32) -> Result<i32, ()> {
72 if y == 0 {
73 return Err(());
74 }
75 x / y$0
76}
77//- /core/lib.rs crate:core
78pub mod result {
79 pub enum Result<T, E> { Ok(T), Err(E) }
80}
81pub mod option {
82 pub enum Option<T> { Some(T), None }
83}
84"#,
85 r#"
86use core::result::Result::{self, Ok, Err};
87
88fn div(x: i32, y: i32) -> Result<i32, ()> {
89 if y == 0 {
90 return Err(());
91 }
92 Ok(x / y)
93}
94"#,
95 );
96 }
97
98 #[test]
99 fn test_wrap_return_type_handles_generic_functions() {
100 check_fix(
101 r#"
102//- /main.rs crate:main deps:core
103use core::result::Result::{self, Ok, Err};
104
105fn div<T>(x: T) -> Result<T, i32> {
106 if x == 0 {
107 return Err(7);
108 }
109 $0x
110}
111//- /core/lib.rs crate:core
112pub mod result {
113 pub enum Result<T, E> { Ok(T), Err(E) }
114}
115pub mod option {
116 pub enum Option<T> { Some(T), None }
117}
118"#,
119 r#"
120use core::result::Result::{self, Ok, Err};
121
122fn div<T>(x: T) -> Result<T, i32> {
123 if x == 0 {
124 return Err(7);
125 }
126 Ok(x)
127}
128"#,
129 );
130 }
131
132 #[test]
133 fn test_wrap_return_type_handles_type_aliases() {
134 check_fix(
135 r#"
136//- /main.rs crate:main deps:core
137use core::result::Result::{self, Ok, Err};
138
139type MyResult<T> = Result<T, ()>;
140
141fn div(x: i32, y: i32) -> MyResult<i32> {
142 if y == 0 {
143 return Err(());
144 }
145 x $0/ y
146}
147//- /core/lib.rs crate:core
148pub mod result {
149 pub enum Result<T, E> { Ok(T), Err(E) }
150}
151pub mod option {
152 pub enum Option<T> { Some(T), None }
153}
154"#,
155 r#"
156use core::result::Result::{self, Ok, Err};
157
158type MyResult<T> = Result<T, ()>;
159
160fn div(x: i32, y: i32) -> MyResult<i32> {
161 if y == 0 {
162 return Err(());
163 }
164 Ok(x / y)
165}
166"#,
167 );
168 }
169
170 #[test]
171 fn test_wrap_return_type_not_applicable_when_expr_type_does_not_match_ok_type() {
172 check_no_diagnostics(
173 r#"
174//- /main.rs crate:main deps:core
175use core::result::Result::{self, Ok, Err};
176
177fn foo() -> Result<(), i32> { 0 }
178
179//- /core/lib.rs crate:core
180pub mod result {
181 pub enum Result<T, E> { Ok(T), Err(E) }
182}
183pub mod option {
184 pub enum Option<T> { Some(T), None }
185}
186"#,
187 );
188 }
189
190 #[test]
191 fn test_wrap_return_type_not_applicable_when_return_type_is_not_result_or_option() {
192 check_no_diagnostics(
193 r#"
194//- /main.rs crate:main deps:core
195use core::result::Result::{self, Ok, Err};
196
197enum SomeOtherEnum { Ok(i32), Err(String) }
198
199fn foo() -> SomeOtherEnum { 0 }
200
201//- /core/lib.rs crate:core
202pub mod result {
203 pub enum Result<T, E> { Ok(T), Err(E) }
204}
205pub mod option {
206 pub enum Option<T> { Some(T), None }
207}
208"#,
209 );
210 }
211}
diff --git a/crates/ide/src/diagnostics/unlinked_file.rs b/crates/ide/src/diagnostics/unlinked_file.rs
index 93fd25dea..51fe0f360 100644
--- a/crates/ide/src/diagnostics/unlinked_file.rs
+++ b/crates/ide/src/diagnostics/unlinked_file.rs
@@ -18,7 +18,7 @@ use syntax::{
18use text_edit::TextEdit; 18use text_edit::TextEdit;
19 19
20use crate::{ 20use crate::{
21 diagnostics::{fix, fixes::DiagnosticWithFix}, 21 diagnostics::{fix, fixes::DiagnosticWithFixes},
22 Assist, 22 Assist,
23}; 23};
24 24
@@ -50,13 +50,13 @@ impl Diagnostic for UnlinkedFile {
50 } 50 }
51} 51}
52 52
53impl DiagnosticWithFix for UnlinkedFile { 53impl DiagnosticWithFixes for UnlinkedFile {
54 fn fix( 54 fn fixes(
55 &self, 55 &self,
56 sema: &hir::Semantics<RootDatabase>, 56 sema: &hir::Semantics<RootDatabase>,
57 _resolve: &AssistResolveStrategy, 57 _resolve: &AssistResolveStrategy,
58 ) -> Option<Assist> { 58 ) -> Option<Vec<Assist>> {
59 // If there's an existing module that could add a `mod` item to include the unlinked file, 59 // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file,
60 // suggest that as a fix. 60 // suggest that as a fix.
61 61
62 let source_root = sema.db.source_root(sema.db.file_source_root(self.file_id)); 62 let source_root = sema.db.source_root(sema.db.file_source_root(self.file_id));
@@ -90,7 +90,7 @@ impl DiagnosticWithFix for UnlinkedFile {
90 } 90 }
91 91
92 if module.origin.file_id() == Some(*parent_id) { 92 if module.origin.file_id() == Some(*parent_id) {
93 return make_fix(sema.db, *parent_id, module_name, self.file_id); 93 return make_fixes(sema.db, *parent_id, module_name, self.file_id);
94 } 94 }
95 } 95 }
96 } 96 }
@@ -101,20 +101,23 @@ impl DiagnosticWithFix for UnlinkedFile {
101 } 101 }
102} 102}
103 103
104fn make_fix( 104fn make_fixes(
105 db: &RootDatabase, 105 db: &RootDatabase,
106 parent_file_id: FileId, 106 parent_file_id: FileId,
107 new_mod_name: &str, 107 new_mod_name: &str,
108 added_file_id: FileId, 108 added_file_id: FileId,
109) -> Option<Assist> { 109) -> Option<Vec<Assist>> {
110 fn is_outline_mod(item: &ast::Item) -> bool { 110 fn is_outline_mod(item: &ast::Item) -> bool {
111 matches!(item, ast::Item::Module(m) if m.item_list().is_none()) 111 matches!(item, ast::Item::Module(m) if m.item_list().is_none())
112 } 112 }
113 113
114 let mod_decl = format!("mod {};", new_mod_name); 114 let mod_decl = format!("mod {};", new_mod_name);
115 let pub_mod_decl = format!("pub mod {};", new_mod_name);
116
115 let ast: ast::SourceFile = db.parse(parent_file_id).tree(); 117 let ast: ast::SourceFile = db.parse(parent_file_id).tree();
116 118
117 let mut builder = TextEdit::builder(); 119 let mut mod_decl_builder = TextEdit::builder();
120 let mut pub_mod_decl_builder = TextEdit::builder();
118 121
119 // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's 122 // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's
120 // probably `#[cfg]`d out). 123 // probably `#[cfg]`d out).
@@ -138,30 +141,43 @@ fn make_fix(
138 { 141 {
139 Some(last) => { 142 Some(last) => {
140 cov_mark::hit!(unlinked_file_append_to_existing_mods); 143 cov_mark::hit!(unlinked_file_append_to_existing_mods);
141 builder.insert(last.syntax().text_range().end(), format!("\n{}", mod_decl)); 144 let offset = last.syntax().text_range().end();
145 mod_decl_builder.insert(offset, format!("\n{}", mod_decl));
146 pub_mod_decl_builder.insert(offset, format!("\n{}", pub_mod_decl));
142 } 147 }
143 None => { 148 None => {
144 // Prepend before the first item in the file. 149 // Prepend before the first item in the file.
145 match ast.items().next() { 150 match ast.items().next() {
146 Some(item) => { 151 Some(item) => {
147 cov_mark::hit!(unlinked_file_prepend_before_first_item); 152 cov_mark::hit!(unlinked_file_prepend_before_first_item);
148 builder.insert(item.syntax().text_range().start(), format!("{}\n\n", mod_decl)); 153 let offset = item.syntax().text_range().start();
154 mod_decl_builder.insert(offset, format!("{}\n\n", mod_decl));
155 pub_mod_decl_builder.insert(offset, format!("{}\n\n", pub_mod_decl));
149 } 156 }
150 None => { 157 None => {
151 // No items in the file, so just append at the end. 158 // No items in the file, so just append at the end.
152 cov_mark::hit!(unlinked_file_empty_file); 159 cov_mark::hit!(unlinked_file_empty_file);
153 builder.insert(ast.syntax().text_range().end(), format!("{}\n", mod_decl)); 160 let offset = ast.syntax().text_range().end();
161 mod_decl_builder.insert(offset, format!("{}\n", mod_decl));
162 pub_mod_decl_builder.insert(offset, format!("{}\n", pub_mod_decl));
154 } 163 }
155 } 164 }
156 } 165 }
157 } 166 }
158 167
159 let edit = builder.finish();
160 let trigger_range = db.parse(added_file_id).tree().syntax().text_range(); 168 let trigger_range = db.parse(added_file_id).tree().syntax().text_range();
161 Some(fix( 169 Some(vec![
162 "add_mod_declaration", 170 fix(
163 &format!("Insert `{}`", mod_decl), 171 "add_mod_declaration",
164 SourceChange::from_text_edit(parent_file_id, edit), 172 &format!("Insert `{}`", mod_decl),
165 trigger_range, 173 SourceChange::from_text_edit(parent_file_id, mod_decl_builder.finish()),
166 )) 174 trigger_range,
175 ),
176 fix(
177 "add_pub_mod_declaration",
178 &format!("Insert `{}`", pub_mod_decl),
179 SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()),
180 trigger_range,
181 ),
182 ])
167} 183}
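
The unlinked_file change above turns the single `mod` fix into two assists built from parallel TextEdit builders inserting at the same offset. Below is a minimal, hedged sketch of that pattern using plain strings and a hypothetical local `Fix` type; it is not rust-analyzer's `Assist`/`TextEdit` API, only an illustration of offering both `mod` and `pub mod` declarations as separate fixes.

```rust
// Hypothetical stand-in for an assist: a label plus the edited parent source.
struct Fix {
    label: String,
    edited: String,
}

fn make_fixes(parent_source: &str, offset: usize, new_mod_name: &str) -> Vec<Fix> {
    let mod_decl = format!("mod {};", new_mod_name);
    let pub_mod_decl = format!("pub mod {};", new_mod_name);

    vec![mod_decl, pub_mod_decl]
        .into_iter()
        .map(|decl| {
            // Insert the declaration at the chosen offset, mirroring what the
            // two TextEdit builders in the diff do for the parent module file.
            let mut edited = parent_source.to_string();
            edited.insert_str(offset, &format!("{}\n", decl));
            Fix { label: format!("Insert `{}`", decl), edited }
        })
        .collect()
}

fn main() {
    for fix in make_fixes("fn main() {}\n", 0, "new_file") {
        println!("{}\n{}", fix.label, fix.edited);
    }
}
```
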
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index cb5a8e19a..320694a17 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -29,7 +29,8 @@ pub(crate) type DocumentationLink = String;
29/// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs) 29/// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs)
30pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Definition) -> String { 30pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Definition) -> String {
31 let mut cb = broken_link_clone_cb; 31 let mut cb = broken_link_clone_cb;
32 let doc = Parser::new_with_broken_link_callback(markdown, Options::empty(), Some(&mut cb)); 32 let doc =
33 Parser::new_with_broken_link_callback(markdown, Options::ENABLE_TASKLISTS, Some(&mut cb));
33 34
34 let doc = map_links(doc, |target, title: &str| { 35 let doc = map_links(doc, |target, title: &str| {
35 // This check is imperfect, there's some overlap between valid intra-doc links 36 // This check is imperfect, there's some overlap between valid intra-doc links
@@ -64,8 +65,7 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Defi
64pub(crate) fn remove_links(markdown: &str) -> String { 65pub(crate) fn remove_links(markdown: &str) -> String {
65 let mut drop_link = false; 66 let mut drop_link = false;
66 67
67 let mut opts = Options::empty(); 68 let opts = Options::ENABLE_TASKLISTS | Options::ENABLE_FOOTNOTES;
68 opts.insert(Options::ENABLE_FOOTNOTES);
69 69
70 let mut cb = |_: BrokenLink| { 70 let mut cb = |_: BrokenLink| {
71 let empty = InlineStr::try_from("").unwrap(); 71 let empty = InlineStr::try_from("").unwrap();
@@ -123,7 +123,7 @@ pub(crate) fn extract_definitions_from_markdown(
123) -> Vec<(TextRange, String, Option<hir::Namespace>)> { 123) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
124 Parser::new_with_broken_link_callback( 124 Parser::new_with_broken_link_callback(
125 markdown, 125 markdown,
126 Options::empty(), 126 Options::ENABLE_TASKLISTS,
127 Some(&mut broken_link_clone_cb), 127 Some(&mut broken_link_clone_cb),
128 ) 128 )
129 .into_offset_iter() 129 .into_offset_iter()
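
The doc_links changes swap `Options::empty()` for combined extension flags. Since pulldown-cmark's `Options` is a bitflags type, extensions compose with `|` instead of the earlier `empty()` + `insert` dance. A small standalone sketch, assuming pulldown-cmark's public `Parser::new_ext` and `html::push_html` entry points (which are not part of this diff):

```rust
use pulldown_cmark::{html, Options, Parser};

fn main() {
    // Bitflags compose directly; this mirrors the new `remove_links` setup.
    let opts = Options::ENABLE_TASKLISTS | Options::ENABLE_FOOTNOTES;

    let markdown = "- [x] render task lists\n- [ ] keep footnotes[^1]\n\n[^1]: like this one";
    let parser = Parser::new_ext(markdown, opts);

    let mut out = String::new();
    html::push_html(&mut out, parser);
    println!("{}", out);
}
```
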
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 61dcbb399..c67ccd1a9 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -4,7 +4,7 @@ use ide_assists::utils::extract_trivial_expression;
4use itertools::Itertools; 4use itertools::Itertools;
5use syntax::{ 5use syntax::{
6 algo::non_trivia_sibling, 6 algo::non_trivia_sibling,
7 ast::{self, AstNode, AstToken}, 7 ast::{self, AstNode, AstToken, IsString},
8 Direction, NodeOrToken, SourceFile, 8 Direction, NodeOrToken, SourceFile,
9 SyntaxKind::{self, USE_TREE, WHITESPACE}, 9 SyntaxKind::{self, USE_TREE, WHITESPACE},
10 SyntaxNode, SyntaxToken, TextRange, TextSize, T, 10 SyntaxNode, SyntaxToken, TextRange, TextSize, T,
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index db08547d1..f4b90db3a 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -565,7 +565,7 @@ impl Analysis {
565 let diagnostic_assists = if include_fixes { 565 let diagnostic_assists = if include_fixes {
566 diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id) 566 diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
567 .into_iter() 567 .into_iter()
568 .filter_map(|it| it.fix) 568 .flat_map(|it| it.fixes.unwrap_or_default())
569 .filter(|it| it.target.intersect(frange.range).is_some()) 569 .filter(|it| it.target.intersect(frange.range).is_some())
570 .collect() 570 .collect()
571 } else { 571 } else {
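
Because a diagnostic's single `fix` became `fixes: Option<Vec<_>>`, the collection site above switches from `filter_map` to `flat_map` with `unwrap_or_default`, flattening zero or more fixes per diagnostic. A toy illustration with plain `String` fixes rather than the real `Assist` type:

```rust
// Hypothetical mini-diagnostic carrying zero or more textual fixes.
struct Diagnostic {
    fixes: Option<Vec<String>>,
}

fn main() {
    let diagnostics = vec![
        Diagnostic { fixes: Some(vec!["add `mod foo;`".into(), "add `pub mod foo;`".into()]) },
        Diagnostic { fixes: None },
    ];

    // flat_map + unwrap_or_default surfaces every fix; filter_map could only
    // ever yield one per diagnostic.
    let all_fixes: Vec<String> = diagnostics
        .into_iter()
        .flat_map(|d| d.fixes.unwrap_or_default())
        .collect();

    assert_eq!(all_fixes.len(), 2);
}
```
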
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 261dcc255..011c8cc55 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -19,14 +19,10 @@ use syntax::{
19pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> { 19pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
20 const BRACES: &[SyntaxKind] = 20 const BRACES: &[SyntaxKind] =
21 &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]]; 21 &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
22 let (brace_token, brace_idx) = file 22 let (brace_token, brace_idx) = file.syntax().token_at_offset(offset).find_map(|node| {
23 .syntax() 23 let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
24 .token_at_offset(offset) 24 Some((node, idx))
25 .filter_map(|node| { 25 })?;
26 let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
27 Some((node, idx))
28 })
29 .next()?;
30 let parent = brace_token.parent()?; 26 let parent = brace_token.parent()?;
31 if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) { 27 if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
32 cov_mark::hit!(pipes_not_braces); 28 cov_mark::hit!(pipes_not_braces);
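
The matching_brace cleanup replaces `filter_map(..).next()` with `find_map`, which is equivalent but stops at the first hit. A self-contained sketch over `char_indices`, with an illustrative brace list standing in for the real `SyntaxKind` tokens:

```rust
fn main() {
    const BRACES: &[char] = &['{', '}', '[', ']', '(', ')'];

    let text = "let v = vec![1, 2];";

    // `find_map` applies the closure lazily and returns the first `Some`,
    // the idiomatic spelling of `filter_map(..).next()`.
    let first_brace = text.char_indices().find_map(|(offset, ch)| {
        let idx = BRACES.iter().position(|&b| b == ch)?;
        Some((offset, idx))
    });

    assert_eq!(first_brace, Some((12, 2))); // '[' at byte offset 12
}
```
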
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 99365c8a7..9b1f48044 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -1,6 +1,8 @@
1use hir::Semantics; 1use hir::Semantics;
2use ide_db::base_db::{CrateId, FileId, FilePosition}; 2use ide_db::{
3use ide_db::RootDatabase; 3 base_db::{CrateId, FileId, FilePosition},
4 RootDatabase,
5};
4use itertools::Itertools; 6use itertools::Itertools;
5use syntax::{ 7use syntax::{
6 algo::find_node_at_offset, 8 algo::find_node_at_offset,
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs
index 2bf953305..01fe3a1a1 100644
--- a/crates/ide/src/references/rename.rs
+++ b/crates/ide/src/references/rename.rs
@@ -55,12 +55,14 @@ pub(crate) fn prepare_rename(
55 match def { 55 match def {
56 Definition::SelfType(_) => bail!("Cannot rename `Self`"), 56 Definition::SelfType(_) => bail!("Cannot rename `Self`"),
57 Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"), 57 Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"),
58 _ => {} 58 Definition::ModuleDef(ModuleDef::Module(_)) => (),
59 _ => {
60 let nav = def
61 .try_to_nav(sema.db)
62 .ok_or_else(|| format_err!("No references found at position"))?;
63 nav.focus_range.ok_or_else(|| format_err!("No identifier available to rename"))?;
64 }
59 }; 65 };
60 let nav =
61 def.try_to_nav(sema.db).ok_or_else(|| format_err!("No references found at position"))?;
62 nav.focus_range.ok_or_else(|| format_err!("No identifier available to rename"))?;
63
64 let name_like = sema 66 let name_like = sema
65 .find_node_at_offset_with_descend(&syntax, position.offset) 67 .find_node_at_offset_with_descend(&syntax, position.offset)
66 .ok_or_else(|| format_err!("No references found at position"))?; 68 .ok_or_else(|| format_err!("No references found at position"))?;
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index b586dcc17..baed8e217 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -227,8 +227,8 @@ pub(super) fn element(
227 k if k.is_keyword() => { 227 k if k.is_keyword() => {
228 let h = Highlight::new(HlTag::Keyword); 228 let h = Highlight::new(HlTag::Keyword);
229 match k { 229 match k {
230 T![await] 230 T![await] => h | HlMod::Async | HlMod::ControlFlow,
231 | T![break] 231 T![break]
232 | T![continue] 232 | T![continue]
233 | T![else] 233 | T![else]
234 | T![if] 234 | T![if]
@@ -255,6 +255,7 @@ pub(super) fn element(
255 }) 255 })
256 .map(|modifier| h | modifier) 256 .map(|modifier| h | modifier)
257 .unwrap_or(h), 257 .unwrap_or(h),
258 T![async] => h | HlMod::Async,
258 _ => h, 259 _ => h,
259 } 260 }
260 } 261 }
@@ -310,6 +311,9 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight {
310 if func.is_unsafe(db) { 311 if func.is_unsafe(db) {
311 h |= HlMod::Unsafe; 312 h |= HlMod::Unsafe;
312 } 313 }
314 if func.is_async(db) {
315 h |= HlMod::Async;
316 }
313 return h; 317 return h;
314 } 318 }
315 hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HlTag::Symbol(SymbolKind::Struct), 319 hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HlTag::Symbol(SymbolKind::Struct),
@@ -409,6 +413,9 @@ fn highlight_method_call(
409 if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) { 413 if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) {
410 h |= HlMod::Unsafe; 414 h |= HlMod::Unsafe;
411 } 415 }
416 if func.is_async(sema.db) {
417 h |= HlMod::Async;
418 }
412 if func.as_assoc_item(sema.db).and_then(|it| it.containing_trait(sema.db)).is_some() { 419 if func.as_assoc_item(sema.db).and_then(|it| it.containing_trait(sema.db)).is_some() {
413 h |= HlMod::Trait 420 h |= HlMod::Trait
414 } 421 }
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index bc221d599..4269d339e 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -6,7 +6,7 @@ use either::Either;
6use hir::{InFile, Semantics}; 6use hir::{InFile, Semantics};
7use ide_db::{call_info::ActiveParameter, helpers::rust_doc::is_rust_fence, SymbolKind}; 7use ide_db::{call_info::ActiveParameter, helpers::rust_doc::is_rust_fence, SymbolKind};
8use syntax::{ 8use syntax::{
9 ast::{self, AstNode}, 9 ast::{self, AstNode, IsString},
10 AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, TextSize, 10 AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, TextSize,
11}; 11};
12 12
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs
index a304b3250..f4a2e7506 100644
--- a/crates/ide/src/syntax_highlighting/tags.rs
+++ b/crates/ide/src/syntax_highlighting/tags.rs
@@ -65,6 +65,8 @@ pub enum HlMod {
65 Static, 65 Static,
66 /// Used for items in traits and trait impls. 66 /// Used for items in traits and trait impls.
67 Trait, 67 Trait,
68 /// Used with keywords like `async` and `await`.
69 Async,
68 // Keep this last! 70 // Keep this last!
69 /// Used for unsafe functions, mutable statics, union accesses and unsafe operations. 71 /// Used for unsafe functions, mutable statics, union accesses and unsafe operations.
70 Unsafe, 72 Unsafe,
@@ -186,6 +188,7 @@ impl HlMod {
186 HlMod::Mutable, 188 HlMod::Mutable,
187 HlMod::Static, 189 HlMod::Static,
188 HlMod::Trait, 190 HlMod::Trait,
191 HlMod::Async,
189 HlMod::Unsafe, 192 HlMod::Unsafe,
190 ]; 193 ];
191 194
@@ -203,6 +206,7 @@ impl HlMod {
203 HlMod::Mutable => "mutable", 206 HlMod::Mutable => "mutable",
204 HlMod::Static => "static", 207 HlMod::Static => "static",
205 HlMod::Trait => "trait", 208 HlMod::Trait => "trait",
209 HlMod::Async => "async",
206 HlMod::Unsafe => "unsafe", 210 HlMod::Unsafe => "unsafe",
207 } 211 }
208 } 212 }
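
tags.rs gains an `Async` modifier, which has to appear both in the exhaustive modifier list and in the string mapping emitted for semantic tokens. The sketch below is a simplified stand-in, not rust-analyzer's actual `HlMod` (which carries more variants), just to show the two places a new variant must be threaded through:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum HlMod {
    Static,
    Trait,
    Async,
    Unsafe,
}

impl HlMod {
    // Exhaustive list used when enumerating supported modifiers.
    const ALL: &'static [HlMod] = &[HlMod::Static, HlMod::Trait, HlMod::Async, HlMod::Unsafe];

    // Stable string name sent to the client.
    fn as_str(self) -> &'static str {
        match self {
            HlMod::Static => "static",
            HlMod::Trait => "trait",
            HlMod::Async => "async",
            HlMod::Unsafe => "unsafe",
        }
    }
}

fn main() {
    for m in HlMod::ALL {
        println!("{:?} -> {}", m, m.as_str());
    }
}
```
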
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 8d83ba206..921a956e6 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -37,13 +37,25 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
37 37
38.unresolved_reference { color: #FC5555; text-decoration: wavy underline; } 38.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
39</style> 39</style>
40<pre><code><span class="comment documentation">/// ```</span> 40<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
41<span class="comment documentation">//! ```</span>
42<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
43<span class="comment documentation">//! ```</span>
44
45<span class="comment documentation">/// ```</span>
41<span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span> 46<span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
42<span class="comment documentation">/// ```</span> 47<span class="comment documentation">/// ```</span>
43<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span> 48<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
44 <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span> 49 <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
45<span class="brace">}</span> 50<span class="brace">}</span>
46 51
52<span class="comment documentation">/// This is an impl with a code block.</span>
53<span class="comment documentation">///</span>
54<span class="comment documentation">/// ```</span>
55<span class="comment documentation">/// </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
56<span class="comment documentation">///</span>
57<span class="comment documentation">/// </span><span class="brace injected">}</span>
58<span class="comment documentation">/// ```</span>
47<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span> 59<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
48 <span class="comment documentation">/// ```</span> 60 <span class="comment documentation">/// ```</span>
49 <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span> 61 <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlighting.html
index df4192194..0d325f3f3 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlighting.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlighting.html
@@ -66,11 +66,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
66 <span class="keyword">pub</span> <span class="field declaration">y</span><span class="colon">:</span> <span class="builtin_type">i32</span><span class="comma">,</span> 66 <span class="keyword">pub</span> <span class="field declaration">y</span><span class="colon">:</span> <span class="builtin_type">i32</span><span class="comma">,</span>
67<span class="brace">}</span> 67<span class="brace">}</span>
68 68
69<span class="keyword">trait</span> <span class="trait declaration">Bar</span> <span class="brace">{</span> 69<span class="keyword">trait</span> <span class="trait declaration">Bar</span> <span class="keyword">where</span> <span class="type_param">Self</span><span class="colon">:</span> <span class="brace">{</span>
70 <span class="keyword">fn</span> <span class="function associated declaration trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span> 70 <span class="keyword">fn</span> <span class="function associated declaration trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span><span class="semicolon">;</span>
71<span class="brace">}</span> 71<span class="brace">}</span>
72 72
73<span class="keyword">impl</span> <span class="trait">Bar</span> <span class="keyword">for</span> <span class="struct">Foo</span> <span class="brace">{</span> 73<span class="keyword">impl</span> <span class="trait">Bar</span> <span class="keyword">for</span> <span class="struct">Foo</span> <span class="keyword">where</span> <span class="self_type">Self</span><span class="colon">:</span> <span class="brace">{</span>
74 <span class="keyword">fn</span> <span class="function associated declaration trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span> 74 <span class="keyword">fn</span> <span class="function associated declaration trait">bar</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">i32</span> <span class="brace">{</span>
75 <span class="self_keyword">self</span><span class="operator">.</span><span class="field">x</span> 75 <span class="self_keyword">self</span><span class="operator">.</span><span class="field">x</span>
76 <span class="brace">}</span> 76 <span class="brace">}</span>
@@ -234,4 +234,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
234 <span class="variable declaration">Nope</span> <span class="operator">=&gt;</span> <span class="variable">Nope</span><span class="comma">,</span> 234 <span class="variable declaration">Nope</span> <span class="operator">=&gt;</span> <span class="variable">Nope</span><span class="comma">,</span>
235 <span class="brace">}</span> 235 <span class="brace">}</span>
236 <span class="brace">}</span> 236 <span class="brace">}</span>
237<span class="brace">}</span>
238
239<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function declaration async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
240 <span class="keyword">let</span> <span class="variable declaration">song</span> <span class="operator">=</span> <span class="unresolved_reference">learn_song</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword control async">await</span><span class="semicolon">;</span>
241 <span class="unresolved_reference">sing_song</span><span class="parenthesis">(</span><span class="variable consuming">song</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword control async">await</span><span class="semicolon">;</span>
242<span class="brace">}</span>
243
244<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function declaration async">async_main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
245 <span class="keyword">let</span> <span class="variable declaration">f1</span> <span class="operator">=</span> <span class="function async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
246 <span class="keyword">let</span> <span class="variable declaration">f2</span> <span class="operator">=</span> <span class="unresolved_reference">dance</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
247 futures::<span class="macro">join!</span><span class="parenthesis">(</span>f1<span class="comma">,</span> f2<span class="parenthesis">)</span><span class="semicolon">;</span>
237<span class="brace">}</span></code></pre> \ No newline at end of file 248<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index b6e952b08..8c8878d36 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -39,11 +39,11 @@ struct Foo {
39 pub y: i32, 39 pub y: i32,
40} 40}
41 41
42trait Bar { 42trait Bar where Self: {
43 fn bar(&self) -> i32; 43 fn bar(&self) -> i32;
44} 44}
45 45
46impl Bar for Foo { 46impl Bar for Foo where Self: {
47 fn bar(&self) -> i32 { 47 fn bar(&self) -> i32 {
48 self.x 48 self.x
49 } 49 }
@@ -208,6 +208,17 @@ impl<T> Option<T> {
208 } 208 }
209 } 209 }
210} 210}
211
212async fn learn_and_sing() {
213 let song = learn_song().await;
214 sing_song(song).await;
215}
216
217async fn async_main() {
218 let f1 = learn_and_sing();
219 let f2 = dance();
220 futures::join!(f1, f2);
221}
211"# 222"#
212 .trim(), 223 .trim(),
213 expect_file!["./test_data/highlighting.html"], 224 expect_file!["./test_data/highlighting.html"],
@@ -513,6 +524,11 @@ fn main() {
513fn test_highlight_doc_comment() { 524fn test_highlight_doc_comment() {
514 check_highlighting( 525 check_highlighting(
515 r#" 526 r#"
527//! This is a module to test doc injection.
528//! ```
529//! fn test() {}
530//! ```
531
516/// ``` 532/// ```
517/// let _ = "early doctests should not go boom"; 533/// let _ = "early doctests should not go boom";
518/// ``` 534/// ```
@@ -520,6 +536,13 @@ struct Foo {
520 bar: bool, 536 bar: bool,
521} 537}
522 538
539/// This is an impl with a code block.
540///
541/// ```
542/// fn foo() {
543///
544/// }
545/// ```
523impl Foo { 546impl Foo {
524 /// ``` 547 /// ```
525 /// let _ = "Call me 548 /// let _ = "Call me
diff --git a/crates/ide_assists/src/assist_context.rs b/crates/ide_assists/src/assist_context.rs
index 682f0ff5e..20754a02a 100644
--- a/crates/ide_assists/src/assist_context.rs
+++ b/crates/ide_assists/src/assist_context.rs
@@ -238,8 +238,8 @@ impl AssistBuilder {
238 } 238 }
239 } 239 }
240 240
241 pub(crate) fn make_ast_mut<N: AstNode>(&mut self, node: N) -> N { 241 pub(crate) fn make_mut<N: AstNode>(&mut self, node: N) -> N {
242 N::cast(self.make_mut(node.syntax().clone())).unwrap() 242 self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
243 } 243 }
244 /// Returns a copy of the `node`, suitable for mutation. 244 /// Returns a copy of the `node`, suitable for mutation.
245 /// 245 ///
@@ -251,7 +251,7 @@ impl AssistBuilder {
251 /// The typical pattern for an assist is to find specific nodes in the read 251 /// The typical pattern for an assist is to find specific nodes in the read
252 /// phase, and then get their mutable couterparts using `make_mut` in the 252 /// phase, and then get their mutable couterparts using `make_mut` in the
253 /// mutable state. 253 /// mutable state.
254 pub(crate) fn make_mut(&mut self, node: SyntaxNode) -> SyntaxNode { 254 pub(crate) fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
255 self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node) 255 self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
256 } 256 }
257 257
diff --git a/crates/ide_assists/src/handlers/add_explicit_type.rs b/crates/ide_assists/src/handlers/add_explicit_type.rs
index 62db31952..36589203d 100644
--- a/crates/ide_assists/src/handlers/add_explicit_type.rs
+++ b/crates/ide_assists/src/handlers/add_explicit_type.rs
@@ -198,6 +198,34 @@ fn main() {
198 ) 198 )
199 } 199 }
200 200
201 /// https://github.com/rust-analyzer/rust-analyzer/issues/2922
202 #[test]
203 fn regression_issue_2922() {
204 check_assist(
205 add_explicit_type,
206 r#"
207fn main() {
208 let $0v = [0.0; 2];
209}
210"#,
211 r#"
212fn main() {
213 let v: [f64; 2] = [0.0; 2];
214}
215"#,
216 );
217 // note: this may break later if we add more consteval. it just needs to be something that our
218 // consteval engine doesn't understand
219 check_assist_not_applicable(
220 add_explicit_type,
221 r#"
222fn main() {
223 let $0l = [0.0; 2+2];
224}
225"#,
226 );
227 }
228
201 #[test] 229 #[test]
202 fn default_generics_should_not_be_added() { 230 fn default_generics_should_not_be_added() {
203 check_assist( 231 check_assist(
diff --git a/crates/ide_assists/src/handlers/add_missing_impl_members.rs b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
index 0148635f9..8225ae22c 100644
--- a/crates/ide_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
@@ -64,7 +64,6 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -
64// impl Trait for () { 64// impl Trait for () {
65// type X = (); 65// type X = ();
66// fn foo(&self) {}$0 66// fn foo(&self) {}$0
67//
68// } 67// }
69// ``` 68// ```
70// -> 69// ->
@@ -195,6 +194,7 @@ impl Foo for S {
195 fn baz(&self) { 194 fn baz(&self) {
196 todo!() 195 todo!()
197 } 196 }
197
198}"#, 198}"#,
199 ); 199 );
200 } 200 }
@@ -231,6 +231,7 @@ impl Foo for S {
231 fn foo(&self) { 231 fn foo(&self) {
232 ${0:todo!()} 232 ${0:todo!()}
233 } 233 }
234
234}"#, 235}"#,
235 ); 236 );
236 } 237 }
diff --git a/crates/ide_assists/src/handlers/auto_import.rs b/crates/ide_assists/src/handlers/auto_import.rs
index a454a2af3..dda5a6631 100644
--- a/crates/ide_assists/src/handlers/auto_import.rs
+++ b/crates/ide_assists/src/handlers/auto_import.rs
@@ -102,8 +102,8 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
102 range, 102 range,
103 |builder| { 103 |builder| {
104 let scope = match scope.clone() { 104 let scope = match scope.clone() {
105 ImportScope::File(it) => ImportScope::File(builder.make_ast_mut(it)), 105 ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
106 ImportScope::Module(it) => ImportScope::Module(builder.make_ast_mut(it)), 106 ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
107 }; 107 };
108 insert_use(&scope, mod_path_to_ast(&import.import_path), ctx.config.insert_use); 108 insert_use(&scope, mod_path_to_ast(&import.import_path), ctx.config.insert_use);
109 }, 109 },
@@ -969,4 +969,30 @@ mod bar {
969"#, 969"#,
970 ); 970 );
971 } 971 }
972
973 #[test]
974 fn uses_abs_path_with_extern_crate_clash() {
975 check_assist(
976 auto_import,
977 r#"
978//- /main.rs crate:main deps:foo
979mod foo {}
980
981const _: () = {
982 Foo$0
983};
984//- /foo.rs crate:foo
985pub struct Foo
986"#,
987 r#"
988use ::foo::Foo;
989
990mod foo {}
991
992const _: () = {
993 Foo
994};
995"#,
996 );
997 }
972} 998}
diff --git a/crates/ide_assists/src/handlers/expand_glob_import.rs b/crates/ide_assists/src/handlers/expand_glob_import.rs
index da8d245e5..79cb08d69 100644
--- a/crates/ide_assists/src/handlers/expand_glob_import.rs
+++ b/crates/ide_assists/src/handlers/expand_glob_import.rs
@@ -61,7 +61,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Opti
61 "Expand glob import", 61 "Expand glob import",
62 target.text_range(), 62 target.text_range(),
63 |builder| { 63 |builder| {
64 let use_tree = builder.make_ast_mut(use_tree); 64 let use_tree = builder.make_mut(use_tree);
65 65
66 let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs); 66 let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
67 let expanded = make::use_tree_list(names_to_import.iter().map(|n| { 67 let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs
index 6311afc1f..a2dba915c 100644
--- a/crates/ide_assists/src/handlers/extract_function.rs
+++ b/crates/ide_assists/src/handlers/extract_function.rs
@@ -642,6 +642,10 @@ fn vars_used_in_body(ctx: &AssistContext, body: &FunctionBody) -> Vec<Local> {
642 .collect() 642 .collect()
643} 643}
644 644
645fn body_contains_await(body: &FunctionBody) -> bool {
646 body.descendants().any(|d| matches!(d.kind(), SyntaxKind::AWAIT_EXPR))
647}
648
645/// find `self` param, that was not defined inside `body` 649/// find `self` param, that was not defined inside `body`
646/// 650///
647/// It should skip `self` params from impls inside `body` 651/// It should skip `self` params from impls inside `body`
@@ -1123,9 +1127,10 @@ fn format_function(
1123 let params = make_param_list(ctx, module, fun); 1127 let params = make_param_list(ctx, module, fun);
1124 let ret_ty = make_ret_ty(ctx, module, fun); 1128 let ret_ty = make_ret_ty(ctx, module, fun);
1125 let body = make_body(ctx, old_indent, new_indent, fun); 1129 let body = make_body(ctx, old_indent, new_indent, fun);
1130 let async_kw = if body_contains_await(&fun.body) { "async " } else { "" };
1126 match ctx.config.snippet_cap { 1131 match ctx.config.snippet_cap {
1127 Some(_) => format_to!(fn_def, "\n\n{}fn $0{}{}", new_indent, fun.name, params), 1132 Some(_) => format_to!(fn_def, "\n\n{}{}fn $0{}{}", new_indent, async_kw, fun.name, params),
1128 None => format_to!(fn_def, "\n\n{}fn {}{}", new_indent, fun.name, params), 1133 None => format_to!(fn_def, "\n\n{}{}fn {}{}", new_indent, async_kw, fun.name, params),
1129 } 1134 }
1130 if let Some(ret_ty) = ret_ty { 1135 if let Some(ret_ty) = ret_ty {
1131 format_to!(fn_def, " {}", ret_ty); 1136 format_to!(fn_def, " {}", ret_ty);
@@ -3565,4 +3570,60 @@ fn $0fun_name(n: i32) -> i32 {
3565}", 3570}",
3566 ); 3571 );
3567 } 3572 }
3573
3574 #[test]
3575 fn extract_with_await() {
3576 check_assist(
3577 extract_function,
3578 r#"fn main() {
3579 $0some_function().await;$0
3580}
3581
3582async fn some_function() {
3583
3584}
3585"#,
3586 r#"
3587fn main() {
3588 fun_name();
3589}
3590
3591async fn $0fun_name() {
3592 some_function().await;
3593}
3594
3595async fn some_function() {
3596
3597}
3598"#,
3599 );
3600 }
3601
3602 #[test]
3603 fn extract_with_await_in_args() {
3604 check_assist(
3605 extract_function,
3606 r#"fn main() {
3607 $0function_call("a", some_function().await);$0
3608}
3609
3610async fn some_function() {
3611
3612}
3613"#,
3614 r#"
3615fn main() {
3616 fun_name();
3617}
3618
3619async fn $0fun_name() {
3620 function_call("a", some_function().await);
3621}
3622
3623async fn some_function() {
3624
3625}
3626"#,
3627 );
3628 }
3568} 3629}
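
extract_function now prefixes the generated function with `async ` whenever the selected body contains an `.await`. A toy sketch of that containment check over a hypothetical mini-AST, standing in for `body.descendants()` on the real syntax tree:

```rust
// Hypothetical toy expression tree; the real code walks rust-analyzer's
// SyntaxNode descendants looking for AWAIT_EXPR.
enum Expr {
    Call(Vec<Expr>),
    Await(Box<Expr>),
    Literal,
}

fn contains_await(expr: &Expr) -> bool {
    match expr {
        Expr::Await(_) => true,
        Expr::Call(args) => args.iter().any(contains_await),
        Expr::Literal => false,
    }
}

fn main() {
    // Roughly corresponds to `function_call("a", some_function().await)`.
    let body = Expr::Call(vec![Expr::Literal, Expr::Await(Box::new(Expr::Call(vec![])))]);
    let async_kw = if contains_await(&body) { "async " } else { "" };
    assert_eq!(async_kw, "async ");
    println!("{}fn fun_name() {{ ... }}", async_kw);
}
```
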
diff --git a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
index 8e2178391..007aba23d 100644
--- a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -70,7 +70,7 @@ pub(crate) fn extract_struct_from_enum_variant(
70 continue; 70 continue;
71 } 71 }
72 builder.edit_file(file_id); 72 builder.edit_file(file_id);
73 let source_file = builder.make_ast_mut(ctx.sema.parse(file_id)); 73 let source_file = builder.make_mut(ctx.sema.parse(file_id));
74 let processed = process_references( 74 let processed = process_references(
75 ctx, 75 ctx,
76 &mut visited_modules_set, 76 &mut visited_modules_set,
@@ -84,8 +84,8 @@ pub(crate) fn extract_struct_from_enum_variant(
84 }); 84 });
85 } 85 }
86 builder.edit_file(ctx.frange.file_id); 86 builder.edit_file(ctx.frange.file_id);
87 let source_file = builder.make_ast_mut(ctx.sema.parse(ctx.frange.file_id)); 87 let source_file = builder.make_mut(ctx.sema.parse(ctx.frange.file_id));
88 let variant = builder.make_ast_mut(variant.clone()); 88 let variant = builder.make_mut(variant.clone());
89 if let Some(references) = def_file_references { 89 if let Some(references) = def_file_references {
90 let processed = process_references( 90 let processed = process_references(
91 ctx, 91 ctx,
diff --git a/crates/ide_assists/src/handlers/fill_match_arms.rs b/crates/ide_assists/src/handlers/fill_match_arms.rs
index be927cc1c..58b001050 100644
--- a/crates/ide_assists/src/handlers/fill_match_arms.rs
+++ b/crates/ide_assists/src/handlers/fill_match_arms.rs
@@ -71,6 +71,7 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
71 .filter_map(|variant| build_pat(ctx.db(), module, variant)) 71 .filter_map(|variant| build_pat(ctx.db(), module, variant))
72 .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat)) 72 .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat))
73 .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) 73 .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block()))
74 .map(|it| it.clone_for_update())
74 .collect::<Vec<_>>(); 75 .collect::<Vec<_>>();
75 if Some(enum_def) 76 if Some(enum_def)
76 == FamousDefs(&ctx.sema, Some(module.krate())) 77 == FamousDefs(&ctx.sema, Some(module.krate()))
@@ -99,6 +100,7 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
99 }) 100 })
100 .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat)) 101 .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat))
101 .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block())) 102 .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block()))
103 .map(|it| it.clone_for_update())
102 .collect() 104 .collect()
103 } else { 105 } else {
104 return None; 106 return None;
@@ -114,10 +116,20 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
114 "Fill match arms", 116 "Fill match arms",
115 target, 117 target,
116 |builder| { 118 |builder| {
117 let new_arm_list = match_arm_list.remove_placeholder(); 119 let new_match_arm_list = match_arm_list.clone_for_update();
118 let n_old_arms = new_arm_list.arms().count(); 120
119 let new_arm_list = new_arm_list.append_arms(missing_arms); 121 let catch_all_arm = new_match_arm_list
120 let first_new_arm = new_arm_list.arms().nth(n_old_arms); 122 .arms()
123 .find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
124 if let Some(arm) = catch_all_arm {
125 arm.remove()
126 }
127 let mut first_new_arm = None;
128 for arm in missing_arms {
129 first_new_arm.get_or_insert_with(|| arm.clone());
130 new_match_arm_list.add_arm(arm);
131 }
132
121 let old_range = ctx.sema.original_range(match_arm_list.syntax()).range; 133 let old_range = ctx.sema.original_range(match_arm_list.syntax()).range;
122 match (first_new_arm, ctx.config.snippet_cap) { 134 match (first_new_arm, ctx.config.snippet_cap) {
123 (Some(first_new_arm), Some(cap)) => { 135 (Some(first_new_arm), Some(cap)) => {
@@ -131,10 +143,10 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
131 } 143 }
132 None => Cursor::Before(first_new_arm.syntax()), 144 None => Cursor::Before(first_new_arm.syntax()),
133 }; 145 };
134 let snippet = render_snippet(cap, new_arm_list.syntax(), cursor); 146 let snippet = render_snippet(cap, new_match_arm_list.syntax(), cursor);
135 builder.replace_snippet(cap, old_range, snippet); 147 builder.replace_snippet(cap, old_range, snippet);
136 } 148 }
137 _ => builder.replace(old_range, new_arm_list.to_string()), 149 _ => builder.replace(old_range, new_match_arm_list.to_string()),
138 } 150 }
139 }, 151 },
140 ) 152 )
@@ -263,18 +275,18 @@ mod tests {
263 check_assist_not_applicable( 275 check_assist_not_applicable(
264 fill_match_arms, 276 fill_match_arms,
265 r#" 277 r#"
266 enum A { 278enum A {
267 As, 279 As,
268 Bs{x:i32, y:Option<i32>}, 280 Bs{x:i32, y:Option<i32>},
269 Cs(i32, Option<i32>), 281 Cs(i32, Option<i32>),
270 } 282}
271 fn main() { 283fn main() {
272 match A::As$0 { 284 match A::As$0 {
273 A::As, 285 A::As,
274 A::Bs{x,y:Some(_)} => {} 286 A::Bs{x,y:Some(_)} => {}
275 A::Cs(_, Some(_)) => {} 287 A::Cs(_, Some(_)) => {}
276 } 288 }
277 } 289}
278 "#, 290 "#,
279 ); 291 );
280 } 292 }
@@ -284,13 +296,13 @@ mod tests {
284 check_assist_not_applicable( 296 check_assist_not_applicable(
285 fill_match_arms, 297 fill_match_arms,
286 r#" 298 r#"
287 fn foo(a: bool) { 299fn foo(a: bool) {
288 match a$0 { 300 match a$0 {
289 true => {} 301 true => {}
290 false => {} 302 false => {}
291 } 303 }
292 } 304}
293 "#, 305"#,
294 ) 306 )
295 } 307 }
296 308
@@ -301,11 +313,11 @@ mod tests {
301 check_assist_not_applicable( 313 check_assist_not_applicable(
302 fill_match_arms, 314 fill_match_arms,
303 r#" 315 r#"
304 fn main() { 316fn main() {
305 match (0, false)$0 { 317 match (0, false)$0 {
306 } 318 }
307 } 319}
308 "#, 320"#,
309 ); 321 );
310 } 322 }
311 323
@@ -314,19 +326,19 @@ mod tests {
314 check_assist( 326 check_assist(
315 fill_match_arms, 327 fill_match_arms,
316 r#" 328 r#"
317 fn foo(a: bool) { 329fn foo(a: bool) {
318 match a$0 { 330 match a$0 {
319 } 331 }
320 } 332}
321 "#, 333"#,
322 r#" 334 r#"
323 fn foo(a: bool) { 335fn foo(a: bool) {
324 match a { 336 match a {
325 $0true => {} 337 $0true => {}
326 false => {} 338 false => {}
327 } 339 }
328 } 340}
329 "#, 341"#,
330 ) 342 )
331 } 343 }
332 344
@@ -335,20 +347,20 @@ mod tests {
335 check_assist( 347 check_assist(
336 fill_match_arms, 348 fill_match_arms,
337 r#" 349 r#"
338 fn foo(a: bool) { 350fn foo(a: bool) {
339 match a$0 { 351 match a$0 {
340 true => {} 352 true => {}
341 } 353 }
342 } 354}
343 "#, 355"#,
344 r#" 356 r#"
345 fn foo(a: bool) { 357fn foo(a: bool) {
346 match a { 358 match a {
347 true => {} 359 true => {}
348 $0false => {} 360 $0false => {}
349 } 361 }
350 } 362}
351 "#, 363"#,
352 ) 364 )
353 } 365 }
354 366
@@ -357,15 +369,15 @@ mod tests {
357 check_assist_not_applicable( 369 check_assist_not_applicable(
358 fill_match_arms, 370 fill_match_arms,
359 r#" 371 r#"
360 fn foo(a: bool) { 372fn foo(a: bool) {
361 match (a, a)$0 { 373 match (a, a)$0 {
362 (true, true) => {} 374 (true, true) => {}
363 (true, false) => {} 375 (true, false) => {}
364 (false, true) => {} 376 (false, true) => {}
365 (false, false) => {} 377 (false, false) => {}
366 } 378 }
367 } 379}
368 "#, 380"#,
369 ) 381 )
370 } 382 }
371 383
@@ -374,21 +386,21 @@ mod tests {
374 check_assist( 386 check_assist(
375 fill_match_arms, 387 fill_match_arms,
376 r#" 388 r#"
377 fn foo(a: bool) { 389fn foo(a: bool) {
378 match (a, a)$0 { 390 match (a, a)$0 {
379 } 391 }
380 } 392}
381 "#, 393"#,
382 r#" 394 r#"
383 fn foo(a: bool) { 395fn foo(a: bool) {
384 match (a, a) { 396 match (a, a) {
385 $0(true, true) => {} 397 $0(true, true) => {}
386 (true, false) => {} 398 (true, false) => {}
387 (false, true) => {} 399 (false, true) => {}
388 (false, false) => {} 400 (false, false) => {}
389 } 401 }
390 } 402}
391 "#, 403"#,
392 ) 404 )
393 } 405 }
394 406
@@ -397,22 +409,22 @@ mod tests {
397 check_assist( 409 check_assist(
398 fill_match_arms, 410 fill_match_arms,
399 r#" 411 r#"
400 fn foo(a: bool) { 412fn foo(a: bool) {
401 match (a, a)$0 { 413 match (a, a)$0 {
402 (false, true) => {} 414 (false, true) => {}
403 } 415 }
404 } 416}
405 "#, 417"#,
406 r#" 418 r#"
407 fn foo(a: bool) { 419fn foo(a: bool) {
408 match (a, a) { 420 match (a, a) {
409 (false, true) => {} 421 (false, true) => {}
410 $0(true, true) => {} 422 $0(true, true) => {}
411 (true, false) => {} 423 (true, false) => {}
412 (false, false) => {} 424 (false, false) => {}
413 } 425 }
414 } 426}
415 "#, 427"#,
416 ) 428 )
417 } 429 }
418 430
@@ -421,32 +433,32 @@ mod tests {
421 check_assist( 433 check_assist(
422 fill_match_arms, 434 fill_match_arms,
423 r#" 435 r#"
424 enum A { 436enum A {
425 As, 437 As,
426 Bs { x: i32, y: Option<i32> }, 438 Bs { x: i32, y: Option<i32> },
427 Cs(i32, Option<i32>), 439 Cs(i32, Option<i32>),
428 } 440}
429 fn main() { 441fn main() {
430 match A::As$0 { 442 match A::As$0 {
431 A::Bs { x, y: Some(_) } => {} 443 A::Bs { x, y: Some(_) } => {}
432 A::Cs(_, Some(_)) => {} 444 A::Cs(_, Some(_)) => {}
433 } 445 }
434 } 446}
435 "#, 447"#,
436 r#" 448 r#"
437 enum A { 449enum A {
438 As, 450 As,
439 Bs { x: i32, y: Option<i32> }, 451 Bs { x: i32, y: Option<i32> },
440 Cs(i32, Option<i32>), 452 Cs(i32, Option<i32>),
441 } 453}
442 fn main() { 454fn main() {
443 match A::As { 455 match A::As {
444 A::Bs { x, y: Some(_) } => {} 456 A::Bs { x, y: Some(_) } => {}
445 A::Cs(_, Some(_)) => {} 457 A::Cs(_, Some(_)) => {}
446 $0A::As => {} 458 $0A::As => {}
447 } 459 }
448 } 460}
449 "#, 461"#,
450 ); 462 );
451 } 463 }
452 464
@@ -593,30 +605,30 @@ fn main() {
593 check_assist( 605 check_assist(
594 fill_match_arms, 606 fill_match_arms,
595 r#" 607 r#"
596 enum A { One, Two } 608enum A { One, Two }
597 enum B { One, Two } 609enum B { One, Two }
598 610
599 fn main() { 611fn main() {
600 let a = A::One; 612 let a = A::One;
601 let b = B::One; 613 let b = B::One;
602 match (a$0, b) {} 614 match (a$0, b) {}
603 } 615}
604 "#, 616"#,
605 r#" 617 r#"
606 enum A { One, Two } 618enum A { One, Two }
607 enum B { One, Two } 619enum B { One, Two }
608 620
609 fn main() { 621fn main() {
610 let a = A::One; 622 let a = A::One;
611 let b = B::One; 623 let b = B::One;
612 match (a, b) { 624 match (a, b) {
613 $0(A::One, B::One) => {} 625 $0(A::One, B::One) => {}
614 (A::One, B::Two) => {} 626 (A::One, B::Two) => {}
615 (A::Two, B::One) => {} 627 (A::Two, B::One) => {}
616 (A::Two, B::Two) => {} 628 (A::Two, B::Two) => {}
617 } 629 }
618 } 630}
619 "#, 631"#,
620 ); 632 );
621 } 633 }
622 634
@@ -625,30 +637,30 @@ fn main() {
625 check_assist( 637 check_assist(
626 fill_match_arms, 638 fill_match_arms,
627 r#" 639 r#"
628 enum A { One, Two } 640enum A { One, Two }
629 enum B { One, Two } 641enum B { One, Two }
630 642
631 fn main() { 643fn main() {
632 let a = A::One; 644 let a = A::One;
633 let b = B::One; 645 let b = B::One;
634 match (&a$0, &b) {} 646 match (&a$0, &b) {}
635 } 647}
636 "#, 648"#,
637 r#" 649 r#"
638 enum A { One, Two } 650enum A { One, Two }
639 enum B { One, Two } 651enum B { One, Two }
640 652
641 fn main() { 653fn main() {
642 let a = A::One; 654 let a = A::One;
643 let b = B::One; 655 let b = B::One;
644 match (&a, &b) { 656 match (&a, &b) {
645 $0(A::One, B::One) => {} 657 $0(A::One, B::One) => {}
646 (A::One, B::Two) => {} 658 (A::One, B::Two) => {}
647 (A::Two, B::One) => {} 659 (A::Two, B::One) => {}
648 (A::Two, B::Two) => {} 660 (A::Two, B::Two) => {}
649 } 661 }
650 } 662}
651 "#, 663"#,
652 ); 664 );
653 } 665 }
654 666
@@ -737,20 +749,20 @@ fn main() {
737 check_assist_not_applicable( 749 check_assist_not_applicable(
738 fill_match_arms, 750 fill_match_arms,
739 r#" 751 r#"
740 enum A { One, Two } 752enum A { One, Two }
741 enum B { One, Two } 753enum B { One, Two }
742 754
743 fn main() { 755fn main() {
744 let a = A::One; 756 let a = A::One;
745 let b = B::One; 757 let b = B::One;
746 match (a$0, b) { 758 match (a$0, b) {
747 (A::Two, B::One) => {} 759 (A::Two, B::One) => {}
748 (A::One, B::One) => {} 760 (A::One, B::One) => {}
749 (A::One, B::Two) => {} 761 (A::One, B::Two) => {}
750 (A::Two, B::Two) => {} 762 (A::Two, B::Two) => {}
751 } 763 }
752 } 764}
753 "#, 765"#,
754 ); 766 );
755 } 767 }
756 768
@@ -759,25 +771,25 @@ fn main() {
759 check_assist( 771 check_assist(
760 fill_match_arms, 772 fill_match_arms,
761 r#" 773 r#"
762 enum A { One, Two } 774enum A { One, Two }
763 775
764 fn main() { 776fn main() {
765 let a = A::One; 777 let a = A::One;
766 match (a$0, ) { 778 match (a$0, ) {
767 } 779 }
768 } 780}
769 "#, 781"#,
770 r#" 782 r#"
771 enum A { One, Two } 783enum A { One, Two }
772 784
773 fn main() { 785fn main() {
774 let a = A::One; 786 let a = A::One;
775 match (a, ) { 787 match (a, ) {
776 $0(A::One,) => {} 788 $0(A::One,) => {}
777 (A::Two,) => {} 789 (A::Two,) => {}
778 } 790 }
779 } 791}
780 "#, 792"#,
781 ); 793 );
782 } 794 }
783 795
@@ -786,47 +798,47 @@ fn main() {
786 check_assist( 798 check_assist(
787 fill_match_arms, 799 fill_match_arms,
788 r#" 800 r#"
789 enum A { As } 801enum A { As }
790 802
791 fn foo(a: &A) { 803fn foo(a: &A) {
792 match a$0 { 804 match a$0 {
793 } 805 }
794 } 806}
795 "#, 807"#,
796 r#" 808 r#"
797 enum A { As } 809enum A { As }
798 810
799 fn foo(a: &A) { 811fn foo(a: &A) {
800 match a { 812 match a {
801 $0A::As => {} 813 $0A::As => {}
802 } 814 }
803 } 815}
804 "#, 816"#,
805 ); 817 );
806 818
807 check_assist( 819 check_assist(
808 fill_match_arms, 820 fill_match_arms,
809 r#" 821 r#"
810 enum A { 822enum A {
811 Es { x: usize, y: usize } 823 Es { x: usize, y: usize }
812 } 824}
813 825
814 fn foo(a: &mut A) { 826fn foo(a: &mut A) {
815 match a$0 { 827 match a$0 {
816 } 828 }
817 } 829}
818 "#, 830"#,
819 r#" 831 r#"
820 enum A { 832enum A {
821 Es { x: usize, y: usize } 833 Es { x: usize, y: usize }
822 } 834}
823 835
824 fn foo(a: &mut A) { 836fn foo(a: &mut A) {
825 match a { 837 match a {
826 $0A::Es { x, y } => {} 838 $0A::Es { x, y } => {}
827 } 839 }
828 } 840}
829 "#, 841"#,
830 ); 842 );
831 } 843 }
832 844
@@ -835,12 +847,12 @@ fn main() {
835 check_assist_target( 847 check_assist_target(
836 fill_match_arms, 848 fill_match_arms,
837 r#" 849 r#"
838 enum E { X, Y } 850enum E { X, Y }
839 851
840 fn main() { 852fn main() {
841 match E::X$0 {} 853 match E::X$0 {}
842 } 854}
843 "#, 855"#,
844 "match E::X {}", 856 "match E::X {}",
845 ); 857 );
846 } 858 }
@@ -850,24 +862,24 @@ fn main() {
850 check_assist( 862 check_assist(
851 fill_match_arms, 863 fill_match_arms,
852 r#" 864 r#"
853 enum E { X, Y } 865enum E { X, Y }
854 866
855 fn main() { 867fn main() {
856 match E::X { 868 match E::X {
857 $0_ => {} 869 $0_ => {}
858 } 870 }
859 } 871}
860 "#, 872"#,
861 r#" 873 r#"
862 enum E { X, Y } 874enum E { X, Y }
863 875
864 fn main() { 876fn main() {
865 match E::X { 877 match E::X {
866 $0E::X => {} 878 $0E::X => {}
867 E::Y => {} 879 E::Y => {}
868 } 880 }
869 } 881}
870 "#, 882"#,
871 ); 883 );
872 } 884 }
873 885
@@ -876,26 +888,26 @@ fn main() {
876 check_assist( 888 check_assist(
877 fill_match_arms, 889 fill_match_arms,
878 r#" 890 r#"
879 mod foo { pub enum E { X, Y } } 891mod foo { pub enum E { X, Y } }
880 use foo::E::X; 892use foo::E::X;
881 893
882 fn main() { 894fn main() {
883 match X { 895 match X {
884 $0 896 $0
885 } 897 }
886 } 898}
887 "#, 899"#,
888 r#" 900 r#"
889 mod foo { pub enum E { X, Y } } 901mod foo { pub enum E { X, Y } }
890 use foo::E::X; 902use foo::E::X;
891 903
892 fn main() { 904fn main() {
893 match X { 905 match X {
894 $0X => {} 906 $0X => {}
895 foo::E::Y => {} 907 foo::E::Y => {}
896 } 908 }
897 } 909}
898 "#, 910"#,
899 ); 911 );
900 } 912 }
901 913
@@ -904,26 +916,26 @@ fn main() {
904 check_assist( 916 check_assist(
905 fill_match_arms, 917 fill_match_arms,
906 r#" 918 r#"
907 enum A { One, Two } 919enum A { One, Two }
908 fn foo(a: A) { 920fn foo(a: A) {
909 match a { 921 match a {
910 // foo bar baz$0 922 // foo bar baz$0
911 A::One => {} 923 A::One => {}
912 // This is where the rest should be 924 // This is where the rest should be
913 } 925 }
914 } 926}
915 "#, 927"#,
916 r#" 928 r#"
917 enum A { One, Two } 929enum A { One, Two }
918 fn foo(a: A) { 930fn foo(a: A) {
919 match a { 931 match a {
920 // foo bar baz 932 // foo bar baz
921 A::One => {} 933 A::One => {}
922 // This is where the rest should be 934 $0A::Two => {}
923 $0A::Two => {} 935 // This is where the rest should be
924 } 936 }
925 } 937}
926 "#, 938"#,
927 ); 939 );
928 } 940 }
929 941
@@ -932,23 +944,23 @@ fn main() {
932 check_assist( 944 check_assist(
933 fill_match_arms, 945 fill_match_arms,
934 r#" 946 r#"
935 enum A { One, Two } 947enum A { One, Two }
936 fn foo(a: A) { 948fn foo(a: A) {
937 match a { 949 match a {
938 // foo bar baz$0 950 // foo bar baz$0
939 } 951 }
940 } 952}
941 "#, 953"#,
942 r#" 954 r#"
943 enum A { One, Two } 955enum A { One, Two }
944 fn foo(a: A) { 956fn foo(a: A) {
945 match a { 957 match a {
946 // foo bar baz 958 $0A::One => {}
947 $0A::One => {} 959 A::Two => {}
948 A::Two => {} 960 // foo bar baz
949 } 961 }
950 } 962}
951 "#, 963"#,
952 ); 964 );
953 } 965 }
954 966
@@ -957,22 +969,22 @@ fn main() {
957 check_assist( 969 check_assist(
958 fill_match_arms, 970 fill_match_arms,
959 r#" 971 r#"
960 enum A { One, Two, } 972enum A { One, Two, }
961 fn foo(a: A) { 973fn foo(a: A) {
962 match a$0 { 974 match a$0 {
963 _ => (), 975 _ => (),
964 } 976 }
965 } 977}
966 "#, 978"#,
967 r#" 979 r#"
968 enum A { One, Two, } 980enum A { One, Two, }
969 fn foo(a: A) { 981fn foo(a: A) {
970 match a { 982 match a {
971 $0A::One => {} 983 $0A::One => {}
972 A::Two => {} 984 A::Two => {}
973 } 985 }
974 } 986}
975 "#, 987"#,
976 ); 988 );
977 } 989 }
978 990
@@ -1016,7 +1028,8 @@ enum Test {
1016fn foo(t: Test) { 1028fn foo(t: Test) {
1017 m!(match t$0 {}); 1029 m!(match t$0 {});
1018}"#, 1030}"#,
1019 r#"macro_rules! m { ($expr:expr) => {$expr}} 1031 r#"
1032macro_rules! m { ($expr:expr) => {$expr}}
1020enum Test { 1033enum Test {
1021 A, 1034 A,
1022 B, 1035 B,
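
fill_match_arms switches to editing a mutable clone of the match arm list (`clone_for_update`, `add_arm`, removing the wildcard arm), but the underlying computation is still "enum variants minus already-covered patterns". A toy, string-based version of that set difference; names are illustrative only, and the real code builds `ast::Pat`s instead of strings:

```rust
fn missing_arms(all_variants: &[&str], covered: &[&str]) -> Vec<String> {
    all_variants
        .iter()
        .copied()
        // Keep only variants that no existing arm already matches.
        .filter(|v| !covered.contains(v))
        .map(|v| format!("{} => {{}}", v))
        .collect()
}

fn main() {
    let arms = missing_arms(&["A::One", "A::Two"], &["A::One"]);
    assert_eq!(arms, vec!["A::Two => {}".to_string()]);
    println!("{:?}", arms);
}
```
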
diff --git a/crates/ide_assists/src/handlers/generate_default_from_new.rs b/crates/ide_assists/src/handlers/generate_default_from_new.rs
index dc14552d6..bad826366 100644
--- a/crates/ide_assists/src/handlers/generate_default_from_new.rs
+++ b/crates/ide_assists/src/handlers/generate_default_from_new.rs
@@ -3,8 +3,10 @@ use crate::{
3 AssistId, 3 AssistId,
4}; 4};
5use ide_db::helpers::FamousDefs; 5use ide_db::helpers::FamousDefs;
6use itertools::Itertools;
7use stdx::format_to;
6use syntax::{ 8use syntax::{
7 ast::{self, Impl, NameOwner}, 9 ast::{self, GenericParamsOwner, Impl, NameOwner, TypeBoundsOwner},
8 AstNode, 10 AstNode,
9}; 11};
10 12
@@ -65,23 +67,56 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext)
65 "Generate a Default impl from a new fn", 67 "Generate a Default impl from a new fn",
66 insert_location, 68 insert_location,
67 move |builder| { 69 move |builder| {
68 let code = default_fn_node_for_new(impl_); 70 let default_code = " fn default() -> Self {
71 Self::new()
72 }";
73 let code = generate_trait_impl_text_from_impl(&impl_, "Default", default_code);
69 builder.insert(insert_location.end(), code); 74 builder.insert(insert_location.end(), code);
70 }, 75 },
71 ) 76 )
72} 77}
73 78
74fn default_fn_node_for_new(impl_: Impl) -> String { 79fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
75 format!( 80 let generic_params = impl_.generic_param_list();
76 " 81 let mut buf = String::with_capacity(code.len());
82 buf.push_str("\n\n");
83 buf.push_str("impl");
84
85 if let Some(generic_params) = &generic_params {
86 let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax()));
87 let type_params = generic_params.type_params().map(|type_param| {
88 let mut buf = String::new();
89 if let Some(it) = type_param.name() {
90 format_to!(buf, "{}", it.syntax());
91 }
92 if let Some(it) = type_param.colon_token() {
93 format_to!(buf, "{} ", it);
94 }
95 if let Some(it) = type_param.type_bound_list() {
96 format_to!(buf, "{}", it.syntax());
97 }
98 buf
99 });
100 let const_params = generic_params.const_params().map(|t| t.syntax().to_string());
101 let generics = lifetimes.chain(type_params).chain(const_params).format(", ");
102 format_to!(buf, "<{}>", generics);
103 }
104
105 buf.push(' ');
106 buf.push_str(trait_text);
107 buf.push_str(" for ");
108 buf.push_str(&impl_.self_ty().unwrap().syntax().text().to_string());
109
110 match impl_.where_clause() {
111 Some(where_clause) => {
112 format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code);
113 }
114 None => {
115 format_to!(buf, " {{\n{}\n}}", code);
116 }
117 }
77 118
78impl Default for {} {{ 119 buf
79 fn default() -> Self {{
80 Self::new()
81 }}
82}}",
83 impl_.self_ty().unwrap().syntax().text()
84 )
85} 120}
86 121
87fn is_default_implemented(ctx: &AssistContext, impl_: &Impl) -> bool { 122fn is_default_implemented(ctx: &AssistContext, impl_: &Impl) -> bool {
@@ -176,6 +211,234 @@ impl Default for Test {
176 } 211 }
177 212
178 #[test] 213 #[test]
214 fn new_function_with_generic() {
215 check_pass(
216 r#"
217pub struct Foo<T> {
218 _bar: *mut T,
219}
220
221impl<T> Foo<T> {
222 pub fn ne$0w() -> Self {
223 unimplemented!()
224 }
225}
226"#,
227 r#"
228pub struct Foo<T> {
229 _bar: *mut T,
230}
231
232impl<T> Foo<T> {
233 pub fn new() -> Self {
234 unimplemented!()
235 }
236}
237
238impl<T> Default for Foo<T> {
239 fn default() -> Self {
240 Self::new()
241 }
242}
243"#,
244 );
245 }
246
247 #[test]
248 fn new_function_with_generics() {
249 check_pass(
250 r#"
251pub struct Foo<T, B> {
252 _tars: *mut T,
253 _bar: *mut B,
254}
255
256impl<T, B> Foo<T, B> {
257 pub fn ne$0w() -> Self {
258 unimplemented!()
259 }
260}
261"#,
262 r#"
263pub struct Foo<T, B> {
264 _tars: *mut T,
265 _bar: *mut B,
266}
267
268impl<T, B> Foo<T, B> {
269 pub fn new() -> Self {
270 unimplemented!()
271 }
272}
273
274impl<T, B> Default for Foo<T, B> {
275 fn default() -> Self {
276 Self::new()
277 }
278}
279"#,
280 );
281 }
282
283 #[test]
284 fn new_function_with_generic_and_bound() {
285 check_pass(
286 r#"
287pub struct Foo<T> {
288 t: T,
289}
290
291impl<T: From<i32>> Foo<T> {
292 pub fn ne$0w() -> Self {
293 Foo { t: 0.into() }
294 }
295}
296"#,
297 r#"
298pub struct Foo<T> {
299 t: T,
300}
301
302impl<T: From<i32>> Foo<T> {
303 pub fn new() -> Self {
304 Foo { t: 0.into() }
305 }
306}
307
308impl<T: From<i32>> Default for Foo<T> {
309 fn default() -> Self {
310 Self::new()
311 }
312}
313"#,
314 );
315 }
316
317 #[test]
318 fn new_function_with_generics_and_bounds() {
319 check_pass(
320 r#"
321pub struct Foo<T, B> {
322 _tars: T,
323 _bar: B,
324}
325
326impl<T: From<i32>, B: From<i64>> Foo<T, B> {
327 pub fn ne$0w() -> Self {
328 unimplemented!()
329 }
330}
331"#,
332 r#"
333pub struct Foo<T, B> {
334 _tars: T,
335 _bar: B,
336}
337
338impl<T: From<i32>, B: From<i64>> Foo<T, B> {
339 pub fn new() -> Self {
340 unimplemented!()
341 }
342}
343
344impl<T: From<i32>, B: From<i64>> Default for Foo<T, B> {
345 fn default() -> Self {
346 Self::new()
347 }
348}
349"#,
350 );
351 }
352
353 #[test]
354 fn new_function_with_generic_and_where() {
355 check_pass(
356 r#"
357pub struct Foo<T> {
358 t: T,
359}
360
361impl<T: From<i32>> Foo<T>
362where
363 Option<T>: Debug
364{
365 pub fn ne$0w() -> Self {
366 Foo { t: 0.into() }
367 }
368}
369"#,
370 r#"
371pub struct Foo<T> {
372 t: T,
373}
374
375impl<T: From<i32>> Foo<T>
376where
377 Option<T>: Debug
378{
379 pub fn new() -> Self {
380 Foo { t: 0.into() }
381 }
382}
383
384impl<T: From<i32>> Default for Foo<T>
385where
386 Option<T>: Debug
387{
388 fn default() -> Self {
389 Self::new()
390 }
391}
392"#,
393 );
394 }
395
396 #[test]
397 fn new_function_with_generics_and_wheres() {
398 check_pass(
399 r#"
400pub struct Foo<T, B> {
401 _tars: T,
402 _bar: B,
403}
404
405impl<T: From<i32>, B: From<i64>> Foo<T, B>
406where
407 Option<T>: Debug, Option<B>: Debug,
408{
409 pub fn ne$0w() -> Self {
410 unimplemented!()
411 }
412}
413"#,
414 r#"
415pub struct Foo<T, B> {
416 _tars: T,
417 _bar: B,
418}
419
420impl<T: From<i32>, B: From<i64>> Foo<T, B>
421where
422 Option<T>: Debug, Option<B>: Debug,
423{
424 pub fn new() -> Self {
425 unimplemented!()
426 }
427}
428
429impl<T: From<i32>, B: From<i64>> Default for Foo<T, B>
430where
431 Option<T>: Debug, Option<B>: Debug,
432{
433 fn default() -> Self {
434 Self::new()
435 }
436}
437"#,
438 );
439 }
440
441 #[test]
179 fn new_function_with_parameters() { 442 fn new_function_with_parameters() {
180 cov_mark::check!(new_function_with_parameters); 443 cov_mark::check!(new_function_with_parameters);
181 check_not_applicable( 444 check_not_applicable(
diff --git a/crates/ide_assists/src/handlers/generate_new.rs b/crates/ide_assists/src/handlers/generate_new.rs
index 8ce5930b7..959a1f86c 100644
--- a/crates/ide_assists/src/handlers/generate_new.rs
+++ b/crates/ide_assists/src/handlers/generate_new.rs
@@ -1,4 +1,3 @@
1use ast::Adt;
2use itertools::Itertools; 1use itertools::Itertools;
3use stdx::format_to; 2use stdx::format_to;
4use syntax::ast::{self, AstNode, NameOwner, StructKind, VisibilityOwner}; 3use syntax::ast::{self, AstNode, NameOwner, StructKind, VisibilityOwner};
@@ -37,7 +36,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
37 }; 36 };
38 37
39 // Return early if we've found an existing new fn 38 // Return early if we've found an existing new fn
40 let impl_def = find_struct_impl(&ctx, &Adt::Struct(strukt.clone()), "new")?; 39 let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
41 40
42 let target = strukt.syntax().text_range(); 41 let target = strukt.syntax().text_range();
43 acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| { 42 acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
@@ -60,7 +59,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
60 let start_offset = impl_def 59 let start_offset = impl_def
61 .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf)) 60 .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf))
62 .unwrap_or_else(|| { 61 .unwrap_or_else(|| {
63 buf = generate_impl_text(&Adt::Struct(strukt.clone()), &buf); 62 buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
64 strukt.syntax().text_range().end() 63 strukt.syntax().text_range().end()
65 }); 64 });
66 65
@@ -81,101 +80,132 @@ mod tests {
81 use super::*; 80 use super::*;
82 81
83 #[test] 82 #[test]
84 #[rustfmt::skip]
85 fn test_generate_new() { 83 fn test_generate_new() {
86 // Check output of generation
87 check_assist( 84 check_assist(
88 generate_new, 85 generate_new,
89"struct Foo {$0}", 86 r#"
90"struct Foo {} 87struct Foo {$0}
88"#,
89 r#"
90struct Foo {}
91 91
92impl Foo { 92impl Foo {
93 fn $0new() -> Self { Self { } } 93 fn $0new() -> Self { Self { } }
94}", 94}
95"#,
95 ); 96 );
96 check_assist( 97 check_assist(
97 generate_new, 98 generate_new,
98"struct Foo<T: Clone> {$0}", 99 r#"
99"struct Foo<T: Clone> {} 100struct Foo<T: Clone> {$0}
101"#,
102 r#"
103struct Foo<T: Clone> {}
100 104
101impl<T: Clone> Foo<T> { 105impl<T: Clone> Foo<T> {
102 fn $0new() -> Self { Self { } } 106 fn $0new() -> Self { Self { } }
103}", 107}
108"#,
104 ); 109 );
105 check_assist( 110 check_assist(
106 generate_new, 111 generate_new,
107"struct Foo<'a, T: Foo<'a>> {$0}", 112 r#"
108"struct Foo<'a, T: Foo<'a>> {} 113struct Foo<'a, T: Foo<'a>> {$0}
114"#,
115 r#"
116struct Foo<'a, T: Foo<'a>> {}
109 117
110impl<'a, T: Foo<'a>> Foo<'a, T> { 118impl<'a, T: Foo<'a>> Foo<'a, T> {
111 fn $0new() -> Self { Self { } } 119 fn $0new() -> Self { Self { } }
112}", 120}
121"#,
113 ); 122 );
114 check_assist( 123 check_assist(
115 generate_new, 124 generate_new,
116"struct Foo { baz: String $0}", 125 r#"
117"struct Foo { baz: String } 126struct Foo { baz: String $0}
127"#,
128 r#"
129struct Foo { baz: String }
118 130
119impl Foo { 131impl Foo {
120 fn $0new(baz: String) -> Self { Self { baz } } 132 fn $0new(baz: String) -> Self { Self { baz } }
121}", 133}
134"#,
122 ); 135 );
123 check_assist( 136 check_assist(
124 generate_new, 137 generate_new,
125"struct Foo { baz: String, qux: Vec<i32> $0}", 138 r#"
126"struct Foo { baz: String, qux: Vec<i32> } 139struct Foo { baz: String, qux: Vec<i32> $0}
140"#,
141 r#"
142struct Foo { baz: String, qux: Vec<i32> }
127 143
128impl Foo { 144impl Foo {
129 fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } } 145 fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
130}", 146}
147"#,
131 ); 148 );
149 }
132 150
133 // Check that visibility modifiers don't get brought in for fields 151 #[test]
152 fn check_that_visibility_modifiers_dont_get_brought_in() {
134 check_assist( 153 check_assist(
135 generate_new, 154 generate_new,
136"struct Foo { pub baz: String, pub qux: Vec<i32> $0}", 155 r#"
137"struct Foo { pub baz: String, pub qux: Vec<i32> } 156struct Foo { pub baz: String, pub qux: Vec<i32> $0}
157"#,
158 r#"
159struct Foo { pub baz: String, pub qux: Vec<i32> }
138 160
139impl Foo { 161impl Foo {
140 fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } } 162 fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
141}", 163}
164"#,
142 ); 165 );
166 }
143 167
144 // Check that it reuses existing impls 168 #[test]
169 fn check_it_reuses_existing_impls() {
145 check_assist( 170 check_assist(
146 generate_new, 171 generate_new,
147"struct Foo {$0} 172 r#"
173struct Foo {$0}
148 174
149impl Foo {} 175impl Foo {}
150", 176"#,
151"struct Foo {} 177 r#"
178struct Foo {}
152 179
153impl Foo { 180impl Foo {
154 fn $0new() -> Self { Self { } } 181 fn $0new() -> Self { Self { } }
155} 182}
156", 183"#,
157 ); 184 );
158 check_assist( 185 check_assist(
159 generate_new, 186 generate_new,
160"struct Foo {$0} 187 r#"
188struct Foo {$0}
161 189
162impl Foo { 190impl Foo {
163 fn qux(&self) {} 191 fn qux(&self) {}
164} 192}
165", 193"#,
166"struct Foo {} 194 r#"
195struct Foo {}
167 196
168impl Foo { 197impl Foo {
169 fn $0new() -> Self { Self { } } 198 fn $0new() -> Self { Self { } }
170 199
171 fn qux(&self) {} 200 fn qux(&self) {}
172} 201}
173", 202"#,
174 ); 203 );
175 204
176 check_assist( 205 check_assist(
177 generate_new, 206 generate_new,
178"struct Foo {$0} 207 r#"
208struct Foo {$0}
179 209
180impl Foo { 210impl Foo {
181 fn qux(&self) {} 211 fn qux(&self) {}
@@ -183,8 +213,9 @@ impl Foo {
183 5 213 5
184 } 214 }
185} 215}
186", 216"#,
187"struct Foo {} 217 r#"
218struct Foo {}
188 219
189impl Foo { 220impl Foo {
190 fn $0new() -> Self { Self { } } 221 fn $0new() -> Self { Self { } }
@@ -194,27 +225,37 @@ impl Foo {
194 5 225 5
195 } 226 }
196} 227}
197", 228"#,
198 ); 229 );
230 }
199 231
200 // Check visibility of new fn based on struct 232 #[test]
233 fn check_visibility_of_new_fn_based_on_struct() {
201 check_assist( 234 check_assist(
202 generate_new, 235 generate_new,
203"pub struct Foo {$0}", 236 r#"
204"pub struct Foo {} 237pub struct Foo {$0}
238"#,
239 r#"
240pub struct Foo {}
205 241
206impl Foo { 242impl Foo {
207 pub fn $0new() -> Self { Self { } } 243 pub fn $0new() -> Self { Self { } }
208}", 244}
245"#,
209 ); 246 );
210 check_assist( 247 check_assist(
211 generate_new, 248 generate_new,
212"pub(crate) struct Foo {$0}", 249 r#"
213"pub(crate) struct Foo {} 250pub(crate) struct Foo {$0}
251"#,
252 r#"
253pub(crate) struct Foo {}
214 254
215impl Foo { 255impl Foo {
216 pub(crate) fn $0new() -> Self { Self { } } 256 pub(crate) fn $0new() -> Self { Self { } }
217}", 257}
258"#,
218 ); 259 );
219 } 260 }
220 261
@@ -222,26 +263,28 @@ impl Foo {
222 fn generate_new_not_applicable_if_fn_exists() { 263 fn generate_new_not_applicable_if_fn_exists() {
223 check_assist_not_applicable( 264 check_assist_not_applicable(
224 generate_new, 265 generate_new,
225 " 266 r#"
226struct Foo {$0} 267struct Foo {$0}
227 268
228impl Foo { 269impl Foo {
229 fn new() -> Self { 270 fn new() -> Self {
230 Self 271 Self
231 } 272 }
232}", 273}
274"#,
233 ); 275 );
234 276
235 check_assist_not_applicable( 277 check_assist_not_applicable(
236 generate_new, 278 generate_new,
237 " 279 r#"
238struct Foo {$0} 280struct Foo {$0}
239 281
240impl Foo { 282impl Foo {
241 fn New() -> Self { 283 fn New() -> Self {
242 Self 284 Self
243 } 285 }
244}", 286}
287"#,
245 ); 288 );
246 } 289 }
247 290
@@ -249,12 +292,12 @@ impl Foo {
249 fn generate_new_target() { 292 fn generate_new_target() {
250 check_assist_target( 293 check_assist_target(
251 generate_new, 294 generate_new,
252 " 295 r#"
253struct SomeThingIrrelevant; 296struct SomeThingIrrelevant;
254/// Has a lifetime parameter 297/// Has a lifetime parameter
255struct Foo<'a, T: Foo<'a>> {$0} 298struct Foo<'a, T: Foo<'a>> {$0}
256struct EvenMoreIrrelevant; 299struct EvenMoreIrrelevant;
257", 300"#,
258 "/// Has a lifetime parameter 301 "/// Has a lifetime parameter
259struct Foo<'a, T: Foo<'a>> {}", 302struct Foo<'a, T: Foo<'a>> {}",
260 ); 303 );
@@ -264,7 +307,7 @@ struct Foo<'a, T: Foo<'a>> {}",
264 fn test_unrelated_new() { 307 fn test_unrelated_new() {
265 check_assist( 308 check_assist(
266 generate_new, 309 generate_new,
267 r##" 310 r#"
268pub struct AstId<N: AstNode> { 311pub struct AstId<N: AstNode> {
269 file_id: HirFileId, 312 file_id: HirFileId,
270 file_ast_id: FileAstId<N>, 313 file_ast_id: FileAstId<N>,
@@ -285,8 +328,9 @@ impl<T> Source<T> {
285 pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { 328 pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
286 Source { file_id: self.file_id, ast: f(self.ast) } 329 Source { file_id: self.file_id, ast: f(self.ast) }
287 } 330 }
288}"##, 331}
289 r##" 332"#,
333 r#"
290pub struct AstId<N: AstNode> { 334pub struct AstId<N: AstNode> {
291 file_id: HirFileId, 335 file_id: HirFileId,
292 file_ast_id: FileAstId<N>, 336 file_ast_id: FileAstId<N>,
@@ -309,7 +353,8 @@ impl<T> Source<T> {
309 pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { 353 pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
310 Source { file_id: self.file_id, ast: f(self.ast) } 354 Source { file_id: self.file_id, ast: f(self.ast) }
311 } 355 }
312}"##, 356}
357"#,
313 ); 358 );
314 } 359 }
315} 360}
diff --git a/crates/ide_assists/src/handlers/introduce_named_lifetime.rs b/crates/ide_assists/src/handlers/introduce_named_lifetime.rs
index 68bc15120..16f8f9d70 100644
--- a/crates/ide_assists/src/handlers/introduce_named_lifetime.rs
+++ b/crates/ide_assists/src/handlers/introduce_named_lifetime.rs
@@ -84,8 +84,8 @@ fn generate_fn_def_assist(
84 } 84 }
85 }; 85 };
86 acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { 86 acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
87 let fn_def = builder.make_ast_mut(fn_def); 87 let fn_def = builder.make_mut(fn_def);
88 let lifetime = builder.make_ast_mut(lifetime); 88 let lifetime = builder.make_mut(lifetime);
89 let loc_needing_lifetime = 89 let loc_needing_lifetime =
90 loc_needing_lifetime.and_then(|it| it.make_mut(builder).to_position()); 90 loc_needing_lifetime.and_then(|it| it.make_mut(builder).to_position());
91 91
@@ -107,8 +107,8 @@ fn generate_impl_def_assist(
107) -> Option<()> { 107) -> Option<()> {
108 let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?; 108 let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?;
109 acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| { 109 acc.add(AssistId(ASSIST_NAME, AssistKind::Refactor), ASSIST_LABEL, lifetime_loc, |builder| {
110 let impl_def = builder.make_ast_mut(impl_def); 110 let impl_def = builder.make_mut(impl_def);
111 let lifetime = builder.make_ast_mut(lifetime); 111 let lifetime = builder.make_mut(lifetime);
112 112
113 impl_def.get_or_create_generic_param_list().add_generic_param( 113 impl_def.get_or_create_generic_param_list().add_generic_param(
114 make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(), 114 make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
@@ -141,8 +141,8 @@ enum NeedsLifetime {
141impl NeedsLifetime { 141impl NeedsLifetime {
142 fn make_mut(self, builder: &mut AssistBuilder) -> Self { 142 fn make_mut(self, builder: &mut AssistBuilder) -> Self {
143 match self { 143 match self {
144 Self::SelfParam(it) => Self::SelfParam(builder.make_ast_mut(it)), 144 Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)),
145 Self::RefType(it) => Self::RefType(builder.make_ast_mut(it)), 145 Self::RefType(it) => Self::RefType(builder.make_mut(it)),
146 } 146 }
147 } 147 }
148 148
diff --git a/crates/ide_assists/src/handlers/merge_imports.rs b/crates/ide_assists/src/handlers/merge_imports.rs
index 3cd090737..31854840c 100644
--- a/crates/ide_assists/src/handlers/merge_imports.rs
+++ b/crates/ide_assists/src/handlers/merge_imports.rs
@@ -47,16 +47,16 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext) -> Option<()
47 target, 47 target,
48 |builder| { 48 |builder| {
49 if let Some((to_replace, replacement, to_remove)) = imports { 49 if let Some((to_replace, replacement, to_remove)) = imports {
50 let to_replace = builder.make_ast_mut(to_replace); 50 let to_replace = builder.make_mut(to_replace);
51 let to_remove = builder.make_ast_mut(to_remove); 51 let to_remove = builder.make_mut(to_remove);
52 52
53 ted::replace(to_replace.syntax(), replacement.syntax()); 53 ted::replace(to_replace.syntax(), replacement.syntax());
54 to_remove.remove(); 54 to_remove.remove();
55 } 55 }
56 56
57 if let Some((to_replace, replacement, to_remove)) = uses { 57 if let Some((to_replace, replacement, to_remove)) = uses {
58 let to_replace = builder.make_ast_mut(to_replace); 58 let to_replace = builder.make_mut(to_replace);
59 let to_remove = builder.make_ast_mut(to_remove); 59 let to_remove = builder.make_mut(to_remove);
60 60
61 ted::replace(to_replace.syntax(), replacement.syntax()); 61 ted::replace(to_replace.syntax(), replacement.syntax());
62 to_remove.remove() 62 to_remove.remove()
diff --git a/crates/ide_assists/src/handlers/move_bounds.rs b/crates/ide_assists/src/handlers/move_bounds.rs
index fa3f76609..d89d11bdf 100644
--- a/crates/ide_assists/src/handlers/move_bounds.rs
+++ b/crates/ide_assists/src/handlers/move_bounds.rs
@@ -36,8 +36,8 @@ pub(crate) fn move_bounds_to_where_clause(acc: &mut Assists, ctx: &AssistContext
36 "Move to where clause", 36 "Move to where clause",
37 target, 37 target,
38 |edit| { 38 |edit| {
39 let type_param_list = edit.make_ast_mut(type_param_list); 39 let type_param_list = edit.make_mut(type_param_list);
40 let parent = edit.make_mut(parent); 40 let parent = edit.make_syntax_mut(parent);
41 41
42 let where_clause: ast::WhereClause = match_ast! { 42 let where_clause: ast::WhereClause = match_ast! {
43 match parent { 43 match parent {
diff --git a/crates/ide_assists/src/handlers/move_module_to_file.rs b/crates/ide_assists/src/handlers/move_module_to_file.rs
index 6e685b4b2..93f702c55 100644
--- a/crates/ide_assists/src/handlers/move_module_to_file.rs
+++ b/crates/ide_assists/src/handlers/move_module_to_file.rs
@@ -1,4 +1,4 @@
1use ast::{edit::IndentLevel, VisibilityOwner}; 1use ast::edit::IndentLevel;
2use ide_db::base_db::AnchoredPathBuf; 2use ide_db::base_db::AnchoredPathBuf;
3use stdx::format_to; 3use stdx::format_to;
4use syntax::{ 4use syntax::{
@@ -60,12 +60,18 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext) -> Opt
60 }; 60 };
61 61
62 let mut buf = String::new(); 62 let mut buf = String::new();
63 if let Some(v) = module_ast.visibility() {
64 format_to!(buf, "{} ", v);
65 }
66 format_to!(buf, "mod {};", module_name); 63 format_to!(buf, "mod {};", module_name);
67 64
68 builder.replace(module_ast.syntax().text_range(), buf); 65 let replacement_start = if let Some(mod_token) = module_ast.mod_token() {
66 mod_token.text_range().start()
67 } else {
68 module_ast.syntax().text_range().start()
69 };
70
71 builder.replace(
72 TextRange::new(replacement_start, module_ast.syntax().text_range().end()),
73 buf,
74 );
69 75
70 let dst = AnchoredPathBuf { anchor: ctx.frange.file_id, path }; 76 let dst = AnchoredPathBuf { anchor: ctx.frange.file_id, path };
71 builder.create_file(dst, contents); 77 builder.create_file(dst, contents);
@@ -184,4 +190,26 @@ pub(crate) mod tests;
184 cov_mark::check!(available_before_curly); 190 cov_mark::check!(available_before_curly);
185 check_assist_not_applicable(move_module_to_file, r#"mod m { $0 }"#); 191 check_assist_not_applicable(move_module_to_file, r#"mod m { $0 }"#);
186 } 192 }
193
194 #[test]
195 fn keep_outer_comments_and_attributes() {
196 check_assist(
197 move_module_to_file,
198 r#"
199/// doc comment
200#[attribute]
201mod $0tests {
202 #[test] fn t() {}
203}
204"#,
205 r#"
206//- /main.rs
207/// doc comment
208#[attribute]
209mod tests;
210//- /tests.rs
211#[test] fn t() {}
212"#,
213 );
214 }
187} 215}
diff --git a/crates/ide_assists/src/handlers/pull_assignment_up.rs b/crates/ide_assists/src/handlers/pull_assignment_up.rs
index 3128faa68..f07b8a6c0 100644
--- a/crates/ide_assists/src/handlers/pull_assignment_up.rs
+++ b/crates/ide_assists/src/handlers/pull_assignment_up.rs
@@ -74,10 +74,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext) -> Opti
74 let assignments: Vec<_> = collector 74 let assignments: Vec<_> = collector
75 .assignments 75 .assignments
76 .into_iter() 76 .into_iter()
77 .map(|(stmt, rhs)| (edit.make_ast_mut(stmt), rhs.clone_for_update())) 77 .map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update()))
78 .collect(); 78 .collect();
79 79
80 let tgt = edit.make_ast_mut(tgt); 80 let tgt = edit.make_mut(tgt);
81 81
82 for (stmt, rhs) in assignments { 82 for (stmt, rhs) in assignments {
83 let mut stmt = stmt.syntax().clone(); 83 let mut stmt = stmt.syntax().clone();
diff --git a/crates/ide_assists/src/handlers/raw_string.rs b/crates/ide_assists/src/handlers/raw_string.rs
index d0f1613f3..d98a55ae4 100644
--- a/crates/ide_assists/src/handlers/raw_string.rs
+++ b/crates/ide_assists/src/handlers/raw_string.rs
@@ -1,6 +1,6 @@
1use std::borrow::Cow; 1use std::borrow::Cow;
2 2
3use syntax::{ast, AstToken, TextRange, TextSize}; 3use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize};
4 4
5use crate::{AssistContext, AssistId, AssistKind, Assists}; 5use crate::{AssistContext, AssistId, AssistKind, Assists};
6 6
diff --git a/crates/ide_assists/src/handlers/reorder_fields.rs b/crates/ide_assists/src/handlers/reorder_fields.rs
index e90bbdbcf..933acead1 100644
--- a/crates/ide_assists/src/handlers/reorder_fields.rs
+++ b/crates/ide_assists/src/handlers/reorder_fields.rs
@@ -70,10 +70,10 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<(
70 target, 70 target,
71 |builder| match fields { 71 |builder| match fields {
72 Either::Left((sorted, field_list)) => { 72 Either::Left((sorted, field_list)) => {
73 replace(builder.make_ast_mut(field_list).fields(), sorted) 73 replace(builder.make_mut(field_list).fields(), sorted)
74 } 74 }
75 Either::Right((sorted, field_list)) => { 75 Either::Right((sorted, field_list)) => {
76 replace(builder.make_ast_mut(field_list).fields(), sorted) 76 replace(builder.make_mut(field_list).fields(), sorted)
77 } 77 }
78 }, 78 },
79 ) 79 )
diff --git a/crates/ide_assists/src/handlers/reorder_impl.rs b/crates/ide_assists/src/handlers/reorder_impl.rs
index 54a9a468e..5a6a9f158 100644
--- a/crates/ide_assists/src/handlers/reorder_impl.rs
+++ b/crates/ide_assists/src/handlers/reorder_impl.rs
@@ -79,8 +79,7 @@ pub(crate) fn reorder_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
79 "Sort methods", 79 "Sort methods",
80 target, 80 target,
81 |builder| { 81 |builder| {
82 let methods = 82 let methods = methods.into_iter().map(|fn_| builder.make_mut(fn_)).collect::<Vec<_>>();
83 methods.into_iter().map(|fn_| builder.make_ast_mut(fn_)).collect::<Vec<_>>();
84 methods 83 methods
85 .into_iter() 84 .into_iter()
86 .zip(sorted) 85 .zip(sorted)
diff --git a/crates/ide_assists/src/handlers/replace_impl_trait_with_generic.rs b/crates/ide_assists/src/handlers/replace_impl_trait_with_generic.rs
index 15420aedf..540a905cc 100644
--- a/crates/ide_assists/src/handlers/replace_impl_trait_with_generic.rs
+++ b/crates/ide_assists/src/handlers/replace_impl_trait_with_generic.rs
@@ -32,8 +32,8 @@ pub(crate) fn replace_impl_trait_with_generic(
32 "Replace impl trait with generic", 32 "Replace impl trait with generic",
33 target, 33 target,
34 |edit| { 34 |edit| {
35 let impl_trait_type = edit.make_ast_mut(impl_trait_type); 35 let impl_trait_type = edit.make_mut(impl_trait_type);
36 let fn_ = edit.make_ast_mut(fn_); 36 let fn_ = edit.make_mut(fn_);
37 37
38 let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type); 38 let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type);
39 39
diff --git a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
index 99ba79860..39f5eb4ff 100644
--- a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
@@ -40,7 +40,7 @@ pub(crate) fn replace_qualified_name_with_use(
40 |builder| { 40 |builder| {
41 // Now that we've brought the name into scope, re-qualify all paths that could be 41 // Now that we've brought the name into scope, re-qualify all paths that could be
42 // affected (that is, all paths inside the node we added the `use` to). 42 // affected (that is, all paths inside the node we added the `use` to).
43 let syntax = builder.make_mut(syntax.clone()); 43 let syntax = builder.make_syntax_mut(syntax.clone());
44 if let Some(ref import_scope) = ImportScope::from(syntax.clone()) { 44 if let Some(ref import_scope) = ImportScope::from(syntax.clone()) {
45 shorten_paths(&syntax, &path.clone_for_update()); 45 shorten_paths(&syntax, &path.clone_for_update());
46 insert_use(import_scope, path, ctx.config.insert_use); 46 insert_use(import_scope, path, ctx.config.insert_use);
diff --git a/crates/ide_assists/src/handlers/replace_string_with_char.rs b/crates/ide_assists/src/handlers/replace_string_with_char.rs
index 634b9c0b7..0800d291e 100644
--- a/crates/ide_assists/src/handlers/replace_string_with_char.rs
+++ b/crates/ide_assists/src/handlers/replace_string_with_char.rs
@@ -1,4 +1,4 @@
1use syntax::{ast, AstToken, SyntaxKind::STRING}; 1use syntax::{ast, ast::IsString, AstToken, SyntaxKind::STRING};
2 2
3use crate::{AssistContext, AssistId, AssistKind, Assists}; 3use crate::{AssistContext, AssistId, AssistKind, Assists};
4 4
diff --git a/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs b/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs
index 0fec961b4..a3bfa221c 100644
--- a/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs
+++ b/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs
@@ -17,7 +17,7 @@ use ide_db::ty_filter::TryEnum;
17 17
18// Assist: replace_unwrap_with_match 18// Assist: replace_unwrap_with_match
19// 19//
20// Replaces `unwrap` a `match` expression. Works for Result and Option. 20// Replaces `unwrap` with a `match` expression. Works for Result and Option.
21// 21//
22// ``` 22// ```
23// enum Result<T, E> { Ok(T), Err(E) } 23// enum Result<T, E> { Ok(T), Err(E) }
diff --git a/crates/ide_assists/src/tests/generated.rs b/crates/ide_assists/src/tests/generated.rs
index 49c1a9776..4406406a2 100644
--- a/crates/ide_assists/src/tests/generated.rs
+++ b/crates/ide_assists/src/tests/generated.rs
@@ -50,7 +50,6 @@ trait Trait {
50impl Trait for () { 50impl Trait for () {
51 type X = (); 51 type X = ();
52 fn foo(&self) {}$0 52 fn foo(&self) {}$0
53
54} 53}
55"#####, 54"#####,
56 r#####" 55 r#####"
diff --git a/crates/ide_assists/src/utils.rs b/crates/ide_assists/src/utils.rs
index 0dcf20c61..fc7caee04 100644
--- a/crates/ide_assists/src/utils.rs
+++ b/crates/ide_assists/src/utils.rs
@@ -17,7 +17,7 @@ use syntax::{
17 ast::AttrsOwner, 17 ast::AttrsOwner,
18 ast::NameOwner, 18 ast::NameOwner,
19 ast::{self, edit, make, ArgListOwner, GenericParamsOwner}, 19 ast::{self, edit, make, ArgListOwner, GenericParamsOwner},
20 AstNode, Direction, SmolStr, 20 ted, AstNode, Direction, SmolStr,
21 SyntaxKind::*, 21 SyntaxKind::*,
22 SyntaxNode, TextSize, T, 22 SyntaxNode, TextSize, T,
23}; 23};
@@ -128,43 +128,43 @@ pub fn add_trait_assoc_items_to_impl(
128 sema: &hir::Semantics<ide_db::RootDatabase>, 128 sema: &hir::Semantics<ide_db::RootDatabase>,
129 items: Vec<ast::AssocItem>, 129 items: Vec<ast::AssocItem>,
130 trait_: hir::Trait, 130 trait_: hir::Trait,
131 impl_def: ast::Impl, 131 impl_: ast::Impl,
132 target_scope: hir::SemanticsScope, 132 target_scope: hir::SemanticsScope,
133) -> (ast::Impl, ast::AssocItem) { 133) -> (ast::Impl, ast::AssocItem) {
134 let impl_item_list = impl_def.assoc_item_list().unwrap_or_else(make::assoc_item_list);
135
136 let n_existing_items = impl_item_list.assoc_items().count();
137 let source_scope = sema.scope_for_def(trait_); 134 let source_scope = sema.scope_for_def(trait_);
138 let ast_transform = QualifyPaths::new(&target_scope, &source_scope) 135 let ast_transform = QualifyPaths::new(&target_scope, &source_scope)
139 .or(SubstituteTypeParams::for_trait_impl(&source_scope, trait_, impl_def.clone())); 136 .or(SubstituteTypeParams::for_trait_impl(&source_scope, trait_, impl_.clone()));
140 137
141 let items = items 138 let items = items
142 .into_iter() 139 .into_iter()
143 .map(|it| it.clone_for_update()) 140 .map(|it| it.clone_for_update())
144 .inspect(|it| ast_transform::apply(&*ast_transform, it)) 141 .inspect(|it| ast_transform::apply(&*ast_transform, it))
145 .map(|it| match it { 142 .map(|it| edit::remove_attrs_and_docs(&it).clone_subtree().clone_for_update());
146 ast::AssocItem::Fn(def) => ast::AssocItem::Fn(add_body(def)), 143
147 ast::AssocItem::TypeAlias(def) => ast::AssocItem::TypeAlias(def.remove_bounds()), 144 let res = impl_.clone_for_update();
148 _ => it, 145
149 }) 146 let assoc_item_list = res.get_or_create_assoc_item_list();
150 .map(|it| edit::remove_attrs_and_docs(&it)); 147 let mut first_item = None;
151 148 for item in items {
152 let new_impl_item_list = impl_item_list.append_items(items); 149 first_item.get_or_insert_with(|| item.clone());
153 let new_impl_def = impl_def.with_assoc_item_list(new_impl_item_list); 150 match &item {
154 let first_new_item = 151 ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
155 new_impl_def.assoc_item_list().unwrap().assoc_items().nth(n_existing_items).unwrap();
156 return (new_impl_def, first_new_item);
157
158 fn add_body(fn_def: ast::Fn) -> ast::Fn {
159 match fn_def.body() {
160 Some(_) => fn_def,
161 None => {
162 let body = make::block_expr(None, Some(make::ext::expr_todo())) 152 let body = make::block_expr(None, Some(make::ext::expr_todo()))
163 .indent(edit::IndentLevel(1)); 153 .indent(edit::IndentLevel(1));
164 fn_def.with_body(body) 154 ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
165 } 155 }
156 ast::AssocItem::TypeAlias(type_alias) => {
157 if let Some(type_bound_list) = type_alias.type_bound_list() {
158 type_bound_list.remove()
159 }
160 }
161 _ => {}
166 } 162 }
163
164 assoc_item_list.add_item(item)
167 } 165 }
166
167 (res, first_item.unwrap())
168} 168}
169 169
170#[derive(Clone, Copy, Debug)] 170#[derive(Clone, Copy, Debug)]
diff --git a/crates/ide_completion/src/completions/keyword.rs b/crates/ide_completion/src/completions/keyword.rs
index b635e0ca3..61b667104 100644
--- a/crates/ide_completion/src/completions/keyword.rs
+++ b/crates/ide_completion/src/completions/keyword.rs
@@ -9,22 +9,21 @@ use crate::{CompletionContext, CompletionItem, CompletionItemKind, CompletionKin
9pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) { 9pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) {
10 // complete keyword "crate" in use stmt 10 // complete keyword "crate" in use stmt
11 let source_range = ctx.source_range(); 11 let source_range = ctx.source_range();
12 let kw_completion = move |text: &str| {
13 let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, text);
14 item.kind(CompletionItemKind::Keyword).insert_text(text);
15 item
16 };
12 17
13 if ctx.use_item_syntax.is_some() { 18 if ctx.use_item_syntax.is_some() {
14 if ctx.path_qual.is_none() { 19 if ctx.path_qual.is_none() {
15 let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, "crate::"); 20 kw_completion("crate::").add_to(acc);
16 item.kind(CompletionItemKind::Keyword).insert_text("crate::");
17 item.add_to(acc);
18 } 21 }
19 let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, "self"); 22 kw_completion("self").add_to(acc);
20 item.kind(CompletionItemKind::Keyword);
21 item.add_to(acc);
22 if iter::successors(ctx.path_qual.clone(), |p| p.qualifier()) 23 if iter::successors(ctx.path_qual.clone(), |p| p.qualifier())
23 .all(|p| p.segment().and_then(|s| s.super_token()).is_some()) 24 .all(|p| p.segment().and_then(|s| s.super_token()).is_some())
24 { 25 {
25 let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, "super::"); 26 kw_completion("super::").add_to(acc);
26 item.kind(CompletionItemKind::Keyword).insert_text("super::");
27 item.add_to(acc);
28 } 27 }
29 } 28 }
30 29
@@ -32,9 +31,8 @@ pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
32 if let Some(receiver) = &ctx.dot_receiver { 31 if let Some(receiver) = &ctx.dot_receiver {
33 if let Some(ty) = ctx.sema.type_of_expr(receiver) { 32 if let Some(ty) = ctx.sema.type_of_expr(receiver) {
34 if ty.impls_future(ctx.db) { 33 if ty.impls_future(ctx.db) {
35 let mut item = 34 let mut item = kw_completion("await");
36 CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await"); 35 item.detail("expr.await");
37 item.kind(CompletionItemKind::Keyword).detail("expr.await").insert_text("await");
38 item.add_to(acc); 36 item.add_to(acc);
39 } 37 }
40 }; 38 };
diff --git a/crates/ide_db/src/helpers/merge_imports.rs b/crates/ide_db/src/helpers/merge_imports.rs
index af2a51a4d..8fb40e837 100644
--- a/crates/ide_db/src/helpers/merge_imports.rs
+++ b/crates/ide_db/src/helpers/merge_imports.rs
@@ -2,8 +2,9 @@
2use std::cmp::Ordering; 2use std::cmp::Ordering;
3 3
4use itertools::{EitherOrBoth, Itertools}; 4use itertools::{EitherOrBoth, Itertools};
5use syntax::ast::{ 5use syntax::{
6 self, edit::AstNodeEdit, make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner, 6 ast::{self, make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner},
7 ted,
7}; 8};
8 9
9/// What type of merges are allowed. 10/// What type of merges are allowed.
@@ -41,10 +42,12 @@ pub fn try_merge_imports(
41 return None; 42 return None;
42 } 43 }
43 44
45 let lhs = lhs.clone_subtree().clone_for_update();
44 let lhs_tree = lhs.use_tree()?; 46 let lhs_tree = lhs.use_tree()?;
45 let rhs_tree = rhs.use_tree()?; 47 let rhs_tree = rhs.use_tree()?;
46 let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?; 48 let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?;
47 Some(lhs.with_use_tree(merged).clone_for_update()) 49 ted::replace(lhs_tree.syntax(), merged.syntax());
50 Some(lhs)
48} 51}
49 52
50pub fn try_merge_trees( 53pub fn try_merge_trees(
@@ -65,7 +68,7 @@ pub fn try_merge_trees(
65 } else { 68 } else {
66 (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix)) 69 (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix))
67 }; 70 };
68 recursive_merge(&lhs, &rhs, merge) 71 recursive_merge(&lhs, &rhs, merge).map(|it| it.clone_for_update())
69} 72}
70 73
71/// Recursively "zips" together lhs and rhs. 74/// Recursively "zips" together lhs and rhs.
@@ -78,7 +81,8 @@ fn recursive_merge(
78 .use_tree_list() 81 .use_tree_list()
79 .into_iter() 82 .into_iter()
80 .flat_map(|list| list.use_trees()) 83 .flat_map(|list| list.use_trees())
81 // we use Option here to early return from this function(this is not the same as a `filter` op) 84 // We use Option here to early return from this function (this is not the
85 // same as a `filter` op).
82 .map(|tree| match merge.is_tree_allowed(&tree) { 86 .map(|tree| match merge.is_tree_allowed(&tree) {
83 true => Some(tree), 87 true => Some(tree),
84 false => None, 88 false => None,
@@ -111,8 +115,10 @@ fn recursive_merge(
111 let tree_is_self = |tree: ast::UseTree| { 115 let tree_is_self = |tree: ast::UseTree| {
112 tree.path().as_ref().map(path_is_self).unwrap_or(false) 116 tree.path().as_ref().map(path_is_self).unwrap_or(false)
113 }; 117 };
114 // check if only one of the two trees has a tree list, and whether that then contains `self` or not. 118 // Check if only one of the two trees has a tree list, and
115 // If this is the case we can skip this iteration since the path without the list is already included in the other one via `self` 119 // whether that then contains `self` or not. If this is the
120 // case we can skip this iteration since the path without
121 // the list is already included in the other one via `self`.
116 let tree_contains_self = |tree: &ast::UseTree| { 122 let tree_contains_self = |tree: &ast::UseTree| {
117 tree.use_tree_list() 123 tree.use_tree_list()
118 .map(|tree_list| tree_list.use_trees().any(tree_is_self)) 124 .map(|tree_list| tree_list.use_trees().any(tree_is_self))
@@ -127,9 +133,11 @@ fn recursive_merge(
127 _ => (), 133 _ => (),
128 } 134 }
129 135
130 // glob imports arent part of the use-tree lists so we need to special handle them here as well 136 // Glob imports aren't part of the use-tree lists so we need
131 // this special handling is only required for when we merge a module import into a glob import of said module 137 // to special handle them here as well. This special handling
132 // see the `merge_self_glob` or `merge_mod_into_glob` tests 138 // is only required for when we merge a module import into a
 139 // glob import of said module; see the `merge_self_glob` or
140 // `merge_mod_into_glob` tests.
133 if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() { 141 if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() {
134 *lhs_t = make::use_tree( 142 *lhs_t = make::use_tree(
135 make::path_unqualified(make::path_segment_self()), 143 make::path_unqualified(make::path_segment_self()),
@@ -165,11 +173,11 @@ fn recursive_merge(
165 } 173 }
166 } 174 }
167 175
168 Some(if let Some(old) = lhs.use_tree_list() { 176 let lhs = lhs.clone_subtree().clone_for_update();
169 lhs.replace_descendant(old, make::use_tree_list(use_trees)).clone_for_update() 177 if let Some(old) = lhs.use_tree_list() {
170 } else { 178 ted::replace(old.syntax(), make::use_tree_list(use_trees).syntax().clone_for_update());
171 lhs.clone() 179 }
172 }) 180 ast::UseTree::cast(lhs.syntax().clone_subtree())
173} 181}
174 182
175/// Traverses both paths until they differ, returning the common prefix of both. 183/// Traverses both paths until they differ, returning the common prefix of both.
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs
index da427d686..0f5c4abc4 100644
--- a/crates/ide_db/src/symbol_index.rs
+++ b/crates/ide_db/src/symbol_index.rs
@@ -161,6 +161,11 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
161// That is, `#` switches from "types" to all symbols, `*` switches from the current 161// That is, `#` switches from "types" to all symbols, `*` switches from the current
162// workspace to dependencies. 162// workspace to dependencies.
163// 163//
164// Note that filtering does not currently work in VSCode due to the editor never
165// sending the special symbols to the language server. Instead, you can configure
166// the filtering via the `rust-analyzer.workspace.symbol.search.scope` and
167// `rust-analyzer.workspace.symbol.search.kind` settings.
168//
164// |=== 169// |===
165// | Editor | Shortcut 170// | Editor | Shortcut
166// 171//
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs
index 7b88dca63..3aa546980 100644
--- a/crates/project_model/src/build_data.rs
+++ b/crates/project_model/src/build_data.rs
@@ -143,6 +143,7 @@ impl WorkspaceBuildData {
143 cmd.env("RA_RUSTC_WRAPPER", "1"); 143 cmd.env("RA_RUSTC_WRAPPER", "1");
144 } 144 }
145 145
146 cmd.current_dir(cargo_toml.parent().unwrap());
146 cmd.args(&["check", "--quiet", "--workspace", "--message-format=json", "--manifest-path"]) 147 cmd.args(&["check", "--quiet", "--workspace", "--message-format=json", "--manifest-path"])
147 .arg(cargo_toml.as_ref()); 148 .arg(cargo_toml.as_ref());
148 149
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index 3c87782f2..b2317618a 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -122,6 +122,7 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabiliti
122 "runnables": { 122 "runnables": {
123 "kinds": [ "cargo" ], 123 "kinds": [ "cargo" ],
124 }, 124 },
125 "workspaceSymbolScopeKindFiltering": true,
125 })), 126 })),
126 } 127 }
127} 128}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index d83670bda..339014fd3 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -24,7 +24,8 @@ use vfs::AbsPathBuf;
24 24
25use crate::{ 25use crate::{
26 caps::completion_item_edit_resolve, diagnostics::DiagnosticsMapConfig, 26 caps::completion_item_edit_resolve, diagnostics::DiagnosticsMapConfig,
27 line_index::OffsetEncoding, lsp_ext::supports_utf8, 27 line_index::OffsetEncoding, lsp_ext::supports_utf8, lsp_ext::WorkspaceSymbolSearchKind,
28 lsp_ext::WorkspaceSymbolSearchScope,
28}; 29};
29 30
30// Defines the server-side configuration of the rust-analyzer. We generate 31// Defines the server-side configuration of the rust-analyzer. We generate
@@ -124,6 +125,13 @@ config_data! {
124 /// These directories will be ignored by rust-analyzer. 125 /// These directories will be ignored by rust-analyzer.
125 files_excludeDirs: Vec<PathBuf> = "[]", 126 files_excludeDirs: Vec<PathBuf> = "[]",
126 127
128 /// Use semantic tokens for strings.
129 ///
130 /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
131 /// By disabling semantic tokens for strings, other grammars can be used to highlight
132 /// their contents.
133 highlighting_strings: bool = "true",
134
127 /// Whether to show `Debug` action. Only applies when 135 /// Whether to show `Debug` action. Only applies when
128 /// `#rust-analyzer.hoverActions.enable#` is set. 136 /// `#rust-analyzer.hoverActions.enable#` is set.
129 hoverActions_debug: bool = "true", 137 hoverActions_debug: bool = "true",
@@ -208,6 +216,11 @@ config_data! {
208 /// Advanced option, fully override the command rust-analyzer uses for 216 /// Advanced option, fully override the command rust-analyzer uses for
209 /// formatting. 217 /// formatting.
210 rustfmt_overrideCommand: Option<Vec<String>> = "null", 218 rustfmt_overrideCommand: Option<Vec<String>> = "null",
219
220 /// Workspace symbol search scope.
221 workspace_symbol_search_scope: WorskpaceSymbolSearchScopeDef = "\"workspace\"",
222 /// Workspace symbol search kind.
223 workspace_symbol_search_kind: WorskpaceSymbolSearchKindDef = "\"only_types\"",
211 } 224 }
212} 225}
213 226
@@ -302,6 +315,15 @@ pub struct RunnablesConfig {
302 pub cargo_extra_args: Vec<String>, 315 pub cargo_extra_args: Vec<String>,
303} 316}
304 317
318/// Configuration for workspace symbol search requests.
319#[derive(Debug, Clone)]
320pub struct WorkspaceSymbolConfig {
 321 /// In what scope should the symbol be searched.
322 pub search_scope: WorkspaceSymbolSearchScope,
 323 /// What kind of symbol is being searched for.
324 pub search_kind: WorkspaceSymbolSearchKind,
325}
326
305impl Config { 327impl Config {
306 pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self { 328 pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
307 Config { caps, data: ConfigData::default(), discovered_projects: None, root_path } 329 Config { caps, data: ConfigData::default(), discovered_projects: None, root_path }
@@ -655,6 +677,9 @@ impl Config {
655 refs: self.data.lens_enable && self.data.lens_references, 677 refs: self.data.lens_enable && self.data.lens_references,
656 } 678 }
657 } 679 }
680 pub fn highlighting_strings(&self) -> bool {
681 self.data.highlighting_strings
682 }
658 pub fn hover(&self) -> HoverConfig { 683 pub fn hover(&self) -> HoverConfig {
659 HoverConfig { 684 HoverConfig {
660 implementations: self.data.hoverActions_enable 685 implementations: self.data.hoverActions_enable
@@ -677,6 +702,22 @@ impl Config {
677 .contains(&MarkupKind::Markdown), 702 .contains(&MarkupKind::Markdown),
678 } 703 }
679 } 704 }
705
706 pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
707 WorkspaceSymbolConfig {
708 search_scope: match self.data.workspace_symbol_search_scope {
709 WorskpaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
710 WorskpaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
711 WorkspaceSymbolSearchScope::WorkspaceAndDependencies
712 }
713 },
714 search_kind: match self.data.workspace_symbol_search_kind {
715 WorskpaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
716 WorskpaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
717 },
718 }
719 }
720
680 pub fn semantic_tokens_refresh(&self) -> bool { 721 pub fn semantic_tokens_refresh(&self) -> bool {
681 try_or!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?, false) 722 try_or!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?, false)
682 } 723 }
@@ -723,6 +764,20 @@ enum ImportPrefixDef {
723 ByCrate, 764 ByCrate,
724} 765}
725 766
767#[derive(Deserialize, Debug, Clone)]
768#[serde(rename_all = "snake_case")]
769enum WorskpaceSymbolSearchScopeDef {
770 Workspace,
771 WorkspaceAndDependencies,
772}
773
774#[derive(Deserialize, Debug, Clone)]
775#[serde(rename_all = "snake_case")]
776enum WorskpaceSymbolSearchKindDef {
777 OnlyTypes,
778 AllSymbols,
779}
780
726macro_rules! _config_data { 781macro_rules! _config_data {
727 (struct $name:ident { 782 (struct $name:ident {
728 $( 783 $(
@@ -893,6 +948,22 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
893 "type": "array", 948 "type": "array",
894 "items": { "type": ["string", "object"] }, 949 "items": { "type": ["string", "object"] },
895 }, 950 },
951 "WorskpaceSymbolSearchScopeDef" => set! {
952 "type": "string",
953 "enum": ["workspace", "workspace_and_dependencies"],
954 "enumDescriptions": [
955 "Search in current workspace only",
956 "Search in current workspace and dependencies"
957 ],
958 },
959 "WorskpaceSymbolSearchKindDef" => set! {
960 "type": "string",
961 "enum": ["only_types", "all_symbols"],
962 "enumDescriptions": [
963 "Search for types only",
 964 "Search for all symbol kinds"
965 ],
966 },
896 _ => panic!("{}: {}", ty, default), 967 _ => panic!("{}: {}", ty, default),
897 } 968 }
898 969
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 551013aa9..51041d7a0 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -38,7 +38,7 @@ use crate::{
38 from_proto, 38 from_proto,
39 global_state::{GlobalState, GlobalStateSnapshot}, 39 global_state::{GlobalState, GlobalStateSnapshot},
40 line_index::LineEndings, 40 line_index::LineEndings,
41 lsp_ext::{self, InlayHint, InlayHintsParams}, 41 lsp_ext::{self, InlayHint, InlayHintsParams, WorkspaceSymbolParams},
42 lsp_utils::all_edits_are_disjoint, 42 lsp_utils::all_edits_are_disjoint,
43 to_proto, LspError, Result, 43 to_proto, LspError, Result,
44}; 44};
@@ -380,11 +380,12 @@ pub(crate) fn handle_document_symbol(
380 380
381pub(crate) fn handle_workspace_symbol( 381pub(crate) fn handle_workspace_symbol(
382 snap: GlobalStateSnapshot, 382 snap: GlobalStateSnapshot,
383 params: lsp_types::WorkspaceSymbolParams, 383 params: WorkspaceSymbolParams,
384) -> Result<Option<Vec<SymbolInformation>>> { 384) -> Result<Option<Vec<SymbolInformation>>> {
385 let _p = profile::span("handle_workspace_symbol"); 385 let _p = profile::span("handle_workspace_symbol");
386 let all_symbols = params.query.contains('#'); 386
387 let libs = params.query.contains('*'); 387 let (all_symbols, libs) = decide_search_scope_and_kind(&params, &snap);
388
388 let query = { 389 let query = {
389 let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect(); 390 let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
390 let mut q = Query::new(query); 391 let mut q = Query::new(query);
@@ -406,6 +407,45 @@ pub(crate) fn handle_workspace_symbol(
406 407
407 return Ok(Some(res)); 408 return Ok(Some(res));
408 409
410 fn decide_search_scope_and_kind(
411 params: &WorkspaceSymbolParams,
412 snap: &GlobalStateSnapshot,
413 ) -> (bool, bool) {
414 // Support old-style parsing of markers in the query.
415 let mut all_symbols = params.query.contains('#');
416 let mut libs = params.query.contains('*');
417
418 let config = snap.config.workspace_symbol();
419
 420 // If no explicit marker was set, check request params. If that's also empty,
421 // use global config.
422 if !all_symbols {
423 let search_kind = if let Some(ref search_kind) = params.search_kind {
424 search_kind
425 } else {
426 &config.search_kind
427 };
428 all_symbols = match search_kind {
429 lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,
430 lsp_ext::WorkspaceSymbolSearchKind::AllSymbols => true,
431 }
432 }
433
434 if !libs {
435 let search_scope = if let Some(ref search_scope) = params.search_scope {
436 search_scope
437 } else {
438 &config.search_scope
439 };
440 libs = match search_scope {
441 lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,
442 lsp_ext::WorkspaceSymbolSearchScope::WorkspaceAndDependencies => true,
443 }
444 }
445
446 (all_symbols, libs)
447 }
448
409 fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> { 449 fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
410 let mut res = Vec::new(); 450 let mut res = Vec::new();
411 for nav in snap.analysis.symbol_search(query)? { 451 for nav in snap.analysis.symbol_search(query)? {
@@ -1394,7 +1434,9 @@ pub(crate) fn handle_semantic_tokens_full(
1394 let line_index = snap.file_line_index(file_id)?; 1434 let line_index = snap.file_line_index(file_id)?;
1395 1435
1396 let highlights = snap.analysis.highlight(file_id)?; 1436 let highlights = snap.analysis.highlight(file_id)?;
1397 let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); 1437 let highlight_strings = snap.config.highlighting_strings();
1438 let semantic_tokens =
1439 to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
1398 1440
1399 // Unconditionally cache the tokens 1441 // Unconditionally cache the tokens
1400 snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone()); 1442 snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1413,8 +1455,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
1413 let line_index = snap.file_line_index(file_id)?; 1455 let line_index = snap.file_line_index(file_id)?;
1414 1456
1415 let highlights = snap.analysis.highlight(file_id)?; 1457 let highlights = snap.analysis.highlight(file_id)?;
1416 1458 let highlight_strings = snap.config.highlighting_strings();
1417 let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); 1459 let semantic_tokens =
1460 to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
1418 1461
1419 let mut cache = snap.semantic_tokens_cache.lock(); 1462 let mut cache = snap.semantic_tokens_cache.lock();
1420 let cached_tokens = cache.entry(params.text_document.uri).or_default(); 1463 let cached_tokens = cache.entry(params.text_document.uri).or_default();
@@ -1443,7 +1486,9 @@ pub(crate) fn handle_semantic_tokens_range(
1443 let line_index = snap.file_line_index(frange.file_id)?; 1486 let line_index = snap.file_line_index(frange.file_id)?;
1444 1487
1445 let highlights = snap.analysis.highlight_range(frange)?; 1488 let highlights = snap.analysis.highlight_range(frange)?;
1446 let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); 1489 let highlight_strings = snap.config.highlighting_strings();
1490 let semantic_tokens =
1491 to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
1447 Ok(Some(semantic_tokens.into())) 1492 Ok(Some(semantic_tokens.into()))
1448} 1493}
1449 1494
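
(Aside, not part of the patch: `decide_search_scope_and_kind` above applies the precedence query markers, then request params, then server config. A hedged sketch of the marker path, using the `WorkspaceSymbolParams` type from lsp_ext.rs; the values are illustrative only.)

    // Old-style markers still win: a query such as "Foo#*" turns on both
    // `all_symbols` and `libs` regardless of request params or config, and the
    // '#'/'*' characters are stripped from the query before the search runs.
    let params = WorkspaceSymbolParams { query: "Foo#*".to_string(), ..Default::default() };
    assert!(params.query.contains('#') && params.query.contains('*'));
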
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 56de9681c..ba2790acb 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -123,7 +123,7 @@ fn integrated_completion_benchmark() {
123 }; 123 };
124 124
125 { 125 {
126 let _it = stdx::timeit("unqualified path completion"); 126 let _p = profile::span("unqualified path completion");
127 let _span = profile::cpu_span(); 127 let _span = profile::cpu_span();
128 let analysis = host.analysis(); 128 let analysis = host.analysis();
129 let config = CompletionConfig { 129 let config = CompletionConfig {
@@ -156,7 +156,7 @@ fn integrated_completion_benchmark() {
156 }; 156 };
157 157
158 { 158 {
159 let _it = stdx::timeit("dot completion"); 159 let _p = profile::span("dot completion");
160 let _span = profile::cpu_span(); 160 let _span = profile::cpu_span();
161 let analysis = host.analysis(); 161 let analysis = host.analysis();
162 let config = CompletionConfig { 162 let config = CompletionConfig {
diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs
index 3bd098058..34b53a7a8 100644
--- a/crates/rust-analyzer/src/lsp_ext.rs
+++ b/crates/rust-analyzer/src/lsp_ext.rs
@@ -4,7 +4,8 @@ use std::{collections::HashMap, path::PathBuf};
4 4
5use lsp_types::request::Request; 5use lsp_types::request::Request;
6use lsp_types::{ 6use lsp_types::{
7 notification::Notification, CodeActionKind, Position, Range, TextDocumentIdentifier, 7 notification::Notification, CodeActionKind, PartialResultParams, Position, Range,
8 TextDocumentIdentifier, WorkDoneProgressParams,
8}; 9};
9use serde::{Deserialize, Serialize}; 10use serde::{Deserialize, Serialize};
10 11
@@ -438,3 +439,42 @@ pub enum MoveItemDirection {
438 Up, 439 Up,
439 Down, 440 Down,
440} 441}
442
443#[derive(Debug)]
444pub enum WorkspaceSymbol {}
445
446impl Request for WorkspaceSymbol {
447 type Params = WorkspaceSymbolParams;
448 type Result = Option<Vec<lsp_types::SymbolInformation>>;
449 const METHOD: &'static str = "workspace/symbol";
450}
451
452#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
453pub struct WorkspaceSymbolParams {
454 #[serde(flatten)]
455 pub partial_result_params: PartialResultParams,
456
457 #[serde(flatten)]
458 pub work_done_progress_params: WorkDoneProgressParams,
459
460 /// A non-empty query string
461 pub query: String,
462
463 pub search_scope: Option<WorkspaceSymbolSearchScope>,
464
465 pub search_kind: Option<WorkspaceSymbolSearchKind>,
466}
467
468#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
469#[serde(rename_all = "camelCase")]
470pub enum WorkspaceSymbolSearchScope {
471 Workspace,
472 WorkspaceAndDependencies,
473}
474
475#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
476#[serde(rename_all = "camelCase")]
477pub enum WorkspaceSymbolSearchKind {
478 OnlyTypes,
479 AllSymbols,
480}
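
(Aside, not part of the patch: a client adopting the extended `workspace/symbol` request could fill the new parameters roughly as below. The values are illustrative; leaving the optional fields as `None` makes the handler fall back to the `workspace.symbol.search.*` configuration.)

    use crate::lsp_ext::{
        WorkspaceSymbolParams, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope,
    };

    // Hypothetical request payload; `query` is the only required field. The
    // flattened PartialResultParams/WorkDoneProgressParams are left empty here.
    let params = WorkspaceSymbolParams {
        query: "Foo".to_string(),
        search_scope: Some(WorkspaceSymbolSearchScope::WorkspaceAndDependencies),
        search_kind: Some(WorkspaceSymbolSearchKind::OnlyTypes),
        ..Default::default()
    };
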
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index c7bd7eee1..4e0791611 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -525,9 +525,9 @@ impl GlobalState {
525 .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs) 525 .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs)
526 .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml) 526 .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
527 .on::<lsp_ext::MoveItem>(handlers::handle_move_item) 527 .on::<lsp_ext::MoveItem>(handlers::handle_move_item)
528 .on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
528 .on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting) 529 .on::<lsp_types::request::OnTypeFormatting>(handlers::handle_on_type_formatting)
529 .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol) 530 .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
530 .on::<lsp_types::request::WorkspaceSymbol>(handlers::handle_workspace_symbol)
531 .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition) 531 .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
532 .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation) 532 .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
533 .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition) 533 .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs
index ecab89b2a..4fd576adb 100644
--- a/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/semantic_tokens.rs
@@ -91,6 +91,7 @@ define_semantic_token_modifiers![
91 (INJECTED, "injected"), 91 (INJECTED, "injected"),
92 (MUTABLE, "mutable"), 92 (MUTABLE, "mutable"),
93 (CONSUMING, "consuming"), 93 (CONSUMING, "consuming"),
94 (ASYNC, "async"),
94 (UNSAFE, "unsafe"), 95 (UNSAFE, "unsafe"),
95 (ATTRIBUTE_MODIFIER, "attribute"), 96 (ATTRIBUTE_MODIFIER, "attribute"),
96 (TRAIT_MODIFIER, "trait"), 97 (TRAIT_MODIFIER, "trait"),
@@ -183,8 +184,8 @@ pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<S
183 } 184 }
184} 185}
185 186
186pub(crate) fn type_index(type_: SemanticTokenType) -> u32 { 187pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
187 SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32 188 SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
188} 189}
189 190
190#[cfg(test)] 191#[cfg(test)]
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index ecf6fd12f..9dec46c78 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -381,6 +381,7 @@ pub(crate) fn semantic_tokens(
381 text: &str, 381 text: &str,
382 line_index: &LineIndex, 382 line_index: &LineIndex,
383 highlights: Vec<HlRange>, 383 highlights: Vec<HlRange>,
384 highlight_strings: bool,
384) -> lsp_types::SemanticTokens { 385) -> lsp_types::SemanticTokens {
385 let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string(); 386 let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
386 let mut builder = semantic_tokens::SemanticTokensBuilder::new(id); 387 let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@@ -389,8 +390,11 @@ pub(crate) fn semantic_tokens(
389 if highlight_range.highlight.is_empty() { 390 if highlight_range.highlight.is_empty() {
390 continue; 391 continue;
391 } 392 }
392 let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight); 393 let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
393 let token_index = semantic_tokens::type_index(type_); 394 if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
395 continue;
396 }
397 let token_index = semantic_tokens::type_index(ty);
394 let modifier_bitset = mods.0; 398 let modifier_bitset = mods.0;
395 399
396 for mut text_range in line_index.index.lines(highlight_range.range) { 400 for mut text_range in line_index.index.lines(highlight_range.range) {
@@ -422,7 +426,7 @@ fn semantic_token_type_and_modifiers(
422 let type_ = match highlight.tag { 426 let type_ = match highlight.tag {
423 HlTag::Symbol(symbol) => match symbol { 427 HlTag::Symbol(symbol) => match symbol {
424 SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE, 428 SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
425 SymbolKind::Impl => lsp_types::SemanticTokenType::TYPE, 429 SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
426 SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY, 430 SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
427 SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER, 431 SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
428 SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER, 432 SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
@@ -496,6 +500,7 @@ fn semantic_token_type_and_modifiers(
496 HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW, 500 HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
497 HlMod::Mutable => semantic_tokens::MUTABLE, 501 HlMod::Mutable => semantic_tokens::MUTABLE,
498 HlMod::Consuming => semantic_tokens::CONSUMING, 502 HlMod::Consuming => semantic_tokens::CONSUMING,
503 HlMod::Async => semantic_tokens::ASYNC,
499 HlMod::Unsafe => semantic_tokens::UNSAFE, 504 HlMod::Unsafe => semantic_tokens::UNSAFE,
500 HlMod::Callable => semantic_tokens::CALLABLE, 505 HlMod::Callable => semantic_tokens::CALLABLE,
501 HlMod::Static => lsp_types::SemanticTokenModifier::STATIC, 506 HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
diff --git a/crates/rust-analyzer/tests/rust-analyzer/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 9e89209ea..9e89209ea 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
diff --git a/crates/rust-analyzer/tests/rust-analyzer/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs
index 75e677762..75e677762 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/support.rs
+++ b/crates/rust-analyzer/tests/slow-tests/support.rs
diff --git a/crates/rust-analyzer/tests/rust-analyzer/testdir.rs b/crates/rust-analyzer/tests/slow-tests/testdir.rs
index 36271344b..36271344b 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/testdir.rs
+++ b/crates/rust-analyzer/tests/slow-tests/testdir.rs
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index c0bc59918..747f0b9eb 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
13[dependencies] 13[dependencies]
14cov-mark = { version = "1.1", features = ["thread-local"] } 14cov-mark = { version = "1.1", features = ["thread-local"] }
15itertools = "0.10.0" 15itertools = "0.10.0"
16rowan = "=0.13.0-pre.5" 16rowan = "=0.13.0-pre.6"
17rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" } 17rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" }
18rustc-hash = "1.1.0" 18rustc-hash = "1.1.0"
19arrayvec = "0.7" 19arrayvec = "0.7"
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 3f9b84ab9..241713c48 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use std::{fmt, hash::BuildHasherDefault, ops::RangeInclusive}; 3use std::{hash::BuildHasherDefault, ops::RangeInclusive};
4 4
5use indexmap::IndexMap; 5use indexmap::IndexMap;
6use itertools::Itertools; 6use itertools::Itertools;
@@ -330,137 +330,6 @@ fn _replace_children(
330 with_children(parent, new_children) 330 with_children(parent, new_children)
331} 331}
332 332
333#[derive(Debug, PartialEq, Eq, Hash)]
334enum InsertPos {
335 FirstChildOf(SyntaxNode),
336 After(SyntaxElement),
337}
338
339#[derive(Default)]
340pub struct SyntaxRewriter<'a> {
341 //FIXME: add debug_assertions that all elements are in fact from the same file.
342 replacements: FxHashMap<SyntaxElement, Replacement>,
343 insertions: IndexMap<InsertPos, Vec<SyntaxElement>>,
344 _pd: std::marker::PhantomData<&'a ()>,
345}
346
347impl fmt::Debug for SyntaxRewriter<'_> {
348 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
349 f.debug_struct("SyntaxRewriter")
350 .field("replacements", &self.replacements)
351 .field("insertions", &self.insertions)
352 .finish()
353 }
354}
355
356impl SyntaxRewriter<'_> {
357 pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) {
358 let what = what.clone().into();
359 let replacement = Replacement::Single(with.clone().into());
360 self.replacements.insert(what, replacement);
361 }
362
363 pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
364 let _p = profile::span("rewrite");
365
366 if self.replacements.is_empty() && self.insertions.is_empty() {
367 return node.clone();
368 }
369 let green = self.rewrite_children(node);
370 with_green(node, green)
371 }
372
373 pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N {
374 N::cast(self.rewrite(node.syntax())).unwrap()
375 }
376
377 /// Returns a node that encompasses all replacements to be done by this rewriter.
378 ///
379 /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`.
380 ///
381 /// Returns `None` when there are no replacements.
382 pub fn rewrite_root(&self) -> Option<SyntaxNode> {
383 let _p = profile::span("rewrite_root");
384 fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> {
385 match element {
386 SyntaxElement::Node(it) => Some(it.clone()),
387 SyntaxElement::Token(it) => it.parent(),
388 }
389 }
390
391 self.replacements
392 .keys()
393 .filter_map(element_to_node_or_parent)
394 .chain(self.insertions.keys().filter_map(|pos| match pos {
395 InsertPos::FirstChildOf(it) => Some(it.clone()),
396 InsertPos::After(it) => element_to_node_or_parent(it),
397 }))
398 // If we only have one replacement/insertion, we must return its parent node, since `rewrite` does
399 // not replace the node passed to it.
400 .map(|it| it.parent().unwrap_or(it))
401 .fold1(|a, b| least_common_ancestor(&a, &b).unwrap())
402 }
403
404 fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
405 self.replacements.get(element).cloned()
406 }
407
408 fn insertions(&self, pos: &InsertPos) -> Option<impl Iterator<Item = SyntaxElement> + '_> {
409 self.insertions.get(pos).map(|insertions| insertions.iter().cloned())
410 }
411
412 fn rewrite_children(&self, node: &SyntaxNode) -> rowan::GreenNode {
413 let _p = profile::span("rewrite_children");
414
415 // FIXME: this could be made much faster.
416 let mut new_children = Vec::new();
417 if let Some(elements) = self.insertions(&InsertPos::FirstChildOf(node.clone())) {
418 new_children.extend(elements.map(element_to_green));
419 }
420 for child in node.children_with_tokens() {
421 self.rewrite_self(&mut new_children, &child);
422 }
423
424 rowan::GreenNode::new(rowan::SyntaxKind(node.kind() as u16), new_children)
425 }
426
427 fn rewrite_self(
428 &self,
429 acc: &mut Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
430 element: &SyntaxElement,
431 ) {
432 let _p = profile::span("rewrite_self");
433
434 if let Some(replacement) = self.replacement(&element) {
435 match replacement {
436 Replacement::Single(element) => acc.push(element_to_green(element)),
437 };
438 } else {
439 match element {
440 NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().to_owned())),
441 NodeOrToken::Node(it) => {
442 acc.push(NodeOrToken::Node(self.rewrite_children(it)));
443 }
444 }
445 }
446 if let Some(elements) = self.insertions(&InsertPos::After(element.clone())) {
447 acc.extend(elements.map(element_to_green));
448 }
449 }
450}
451
452fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
453 match element {
454 NodeOrToken::Node(it) => NodeOrToken::Node(it.green().into_owned()),
455 NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()),
456 }
457}
458
459#[derive(Clone, Debug)]
460enum Replacement {
461 Single(SyntaxElement),
462}
463
464fn with_children( 333fn with_children(
465 parent: &SyntaxNode, 334 parent: &SyntaxNode,
466 new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, 335 new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 7f472d4db..a8071b51d 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -47,6 +47,12 @@ pub trait AstNode {
47 { 47 {
48 Self::cast(self.syntax().clone_for_update()).unwrap() 48 Self::cast(self.syntax().clone_for_update()).unwrap()
49 } 49 }
50 fn clone_subtree(&self) -> Self
51 where
52 Self: Sized,
53 {
54 Self::cast(self.syntax().clone_subtree()).unwrap()
55 }
50} 56}
51 57
52/// Like `AstNode`, but wraps tokens rather than interior nodes. 58/// Like `AstNode`, but wraps tokens rather than interior nodes.
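`clone_subtree` (backed by the rowan bump above) re-roots a node in a fresh tree, which is what `make::ast_from_text` now relies on instead of the hand-rolled `unroot`. A hedged sketch of the observable effect, assuming this repo's `syntax` crate as a dependency:

```rust
use syntax::{ast, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("mod m; fn f() {}").tree();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();

    // Inside the file the function starts after offset 0; once re-rooted
    // with `clone_subtree` it becomes the root of its own tree at offset 0.
    let rerooted = func.clone_subtree();
    assert!(rerooted.syntax().parent().is_none());
    assert_eq!(u32::from(rerooted.syntax().text_range().start()), 0);
}
```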
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index cbc75f922..61952377f 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -2,20 +2,16 @@
2//! immutable, all function here return a fresh copy of the tree, instead of 2//! immutable, all function here return a fresh copy of the tree, instead of
3//! doing an in-place modification. 3//! doing an in-place modification.
4use std::{ 4use std::{
5 array, fmt, iter, 5 fmt, iter,
6 ops::{self, RangeInclusive}, 6 ops::{self, RangeInclusive},
7}; 7};
8 8
9use arrayvec::ArrayVec; 9use arrayvec::ArrayVec;
10 10
11use crate::{ 11use crate::{
12 algo::{self, SyntaxRewriter}, 12 algo,
13 ast::{ 13 ast::{self, make, AstNode},
14 self, 14 ted, AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxKind,
15 make::{self, tokens},
16 AstNode, GenericParamsOwner, NameOwner, TypeBoundsOwner,
17 },
18 AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind,
19 SyntaxKind::{ATTR, COMMENT, WHITESPACE}, 15 SyntaxKind::{ATTR, COMMENT, WHITESPACE},
20 SyntaxNode, SyntaxToken, T, 16 SyntaxNode, SyntaxToken, T,
21}; 17};
@@ -29,250 +25,6 @@ impl ast::BinExpr {
29 } 25 }
30} 26}
31 27
32impl ast::Fn {
33 #[must_use]
34 pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn {
35 let mut to_insert: ArrayVec<SyntaxElement, 2> = ArrayVec::new();
36 let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
37 old_body.syntax().clone().into()
38 } else if let Some(semi) = self.semicolon_token() {
39 to_insert.push(make::tokens::single_space().into());
40 semi.into()
41 } else {
42 to_insert.push(make::tokens::single_space().into());
43 to_insert.push(body.syntax().clone().into());
44 return self.insert_children(InsertPosition::Last, to_insert);
45 };
46 to_insert.push(body.syntax().clone().into());
47 self.replace_children(single_node(old_body_or_semi), to_insert)
48 }
49
50 #[must_use]
51 pub fn with_generic_param_list(&self, generic_args: ast::GenericParamList) -> ast::Fn {
52 if let Some(old) = self.generic_param_list() {
53 return self.replace_descendant(old, generic_args);
54 }
55
56 let anchor = self.name().expect("The function must have a name").syntax().clone();
57
58 let to_insert = [generic_args.syntax().clone().into()];
59 self.insert_children(InsertPosition::After(anchor.into()), array::IntoIter::new(to_insert))
60 }
61}
62
63fn make_multiline<N>(node: N) -> N
64where
65 N: AstNode + Clone,
66{
67 let l_curly = match node.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) {
68 Some(it) => it,
69 None => return node,
70 };
71 let sibling = match l_curly.next_sibling_or_token() {
72 Some(it) => it,
73 None => return node,
74 };
75 let existing_ws = match sibling.as_token() {
76 None => None,
77 Some(tok) if tok.kind() != WHITESPACE => None,
78 Some(ws) => {
79 if ws.text().contains('\n') {
80 return node;
81 }
82 Some(ws.clone())
83 }
84 };
85
86 let indent = leading_indent(node.syntax()).unwrap_or_default();
87 let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
88 let to_insert = iter::once(ws.ws().into());
89 match existing_ws {
90 None => node.insert_children(InsertPosition::After(l_curly), to_insert),
91 Some(ws) => node.replace_children(single_node(ws), to_insert),
92 }
93}
94
95impl ast::Impl {
96 #[must_use]
97 pub fn with_assoc_item_list(&self, items: ast::AssocItemList) -> ast::Impl {
98 let mut to_insert: ArrayVec<SyntaxElement, 2> = ArrayVec::new();
99 if let Some(old_items) = self.assoc_item_list() {
100 let to_replace: SyntaxElement = old_items.syntax().clone().into();
101 to_insert.push(items.syntax().clone().into());
102 self.replace_children(single_node(to_replace), to_insert)
103 } else {
104 to_insert.push(make::tokens::single_space().into());
105 to_insert.push(items.syntax().clone().into());
106 self.insert_children(InsertPosition::Last, to_insert)
107 }
108 }
109}
110
111impl ast::AssocItemList {
112 #[must_use]
113 pub fn append_items(
114 &self,
115 items: impl IntoIterator<Item = ast::AssocItem>,
116 ) -> ast::AssocItemList {
117 let mut res = self.clone();
118 if !self.syntax().text().contains_char('\n') {
119 res = make_multiline(res);
120 }
121 items.into_iter().for_each(|it| res = res.append_item(it));
122 res.fixup_trailing_whitespace().unwrap_or(res)
123 }
124
125 #[must_use]
126 pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList {
127 let (indent, position, whitespace) = match self.assoc_items().last() {
128 Some(it) => (
129 leading_indent(it.syntax()).unwrap_or_default().to_string(),
130 InsertPosition::After(it.syntax().clone().into()),
131 "\n\n",
132 ),
133 None => match self.l_curly_token() {
134 Some(it) => (
135 " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(),
136 InsertPosition::After(it.into()),
137 "\n",
138 ),
139 None => return self.clone(),
140 },
141 };
142 let ws = tokens::WsBuilder::new(&format!("{}{}", whitespace, indent));
143 let to_insert: ArrayVec<SyntaxElement, 2> =
144 [ws.ws().into(), item.syntax().clone().into()].into();
145 self.insert_children(position, to_insert)
146 }
147
148 /// Remove extra whitespace between last item and closing curly brace.
149 fn fixup_trailing_whitespace(&self) -> Option<ast::AssocItemList> {
150 let first_token_after_items =
151 self.assoc_items().last()?.syntax().next_sibling_or_token()?;
152 let last_token_before_curly = self.r_curly_token()?.prev_sibling_or_token()?;
153 if last_token_before_curly != first_token_after_items {
154 // there is something more between last item and
155 // right curly than just whitespace - bail out
156 return None;
157 }
158 let whitespace =
159 last_token_before_curly.clone().into_token().and_then(ast::Whitespace::cast)?;
160 let text = whitespace.syntax().text();
161 let newline = text.rfind('\n')?;
162 let keep = tokens::WsBuilder::new(&text[newline..]);
163 Some(self.replace_children(
164 first_token_after_items..=last_token_before_curly,
165 std::iter::once(keep.ws().into()),
166 ))
167 }
168}
169
170impl ast::RecordExprFieldList {
171 #[must_use]
172 pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList {
173 self.insert_field(InsertPosition::Last, field)
174 }
175
176 #[must_use]
177 pub fn insert_field(
178 &self,
179 position: InsertPosition<&'_ ast::RecordExprField>,
180 field: &ast::RecordExprField,
181 ) -> ast::RecordExprFieldList {
182 let is_multiline = self.syntax().text().contains_char('\n');
183 let ws;
184 let space = if is_multiline {
185 ws = tokens::WsBuilder::new(&format!(
186 "\n{} ",
187 leading_indent(self.syntax()).unwrap_or_default()
188 ));
189 ws.ws()
190 } else {
191 tokens::single_space()
192 };
193
194 let mut to_insert: ArrayVec<SyntaxElement, 4> = ArrayVec::new();
195 to_insert.push(space.into());
196 to_insert.push(field.syntax().clone().into());
197 to_insert.push(make::token(T![,]).into());
198
199 macro_rules! after_l_curly {
200 () => {{
201 let anchor = match self.l_curly_token() {
202 Some(it) => it.into(),
203 None => return self.clone(),
204 };
205 InsertPosition::After(anchor)
206 }};
207 }
208
209 macro_rules! after_field {
210 ($anchor:expr) => {
211 if let Some(comma) = $anchor
212 .syntax()
213 .siblings_with_tokens(Direction::Next)
214 .find(|it| it.kind() == T![,])
215 {
216 InsertPosition::After(comma)
217 } else {
218 to_insert.insert(0, make::token(T![,]).into());
219 InsertPosition::After($anchor.syntax().clone().into())
220 }
221 };
222 }
223
224 let position = match position {
225 InsertPosition::First => after_l_curly!(),
226 InsertPosition::Last => {
227 if !is_multiline {
228 // don't insert comma before curly
229 to_insert.pop();
230 }
231 match self.fields().last() {
232 Some(it) => after_field!(it),
233 None => after_l_curly!(),
234 }
235 }
236 InsertPosition::Before(anchor) => {
237 InsertPosition::Before(anchor.syntax().clone().into())
238 }
239 InsertPosition::After(anchor) => after_field!(anchor),
240 };
241
242 self.insert_children(position, to_insert)
243 }
244}
245
246impl ast::TypeAlias {
247 #[must_use]
248 pub fn remove_bounds(&self) -> ast::TypeAlias {
249 let colon = match self.colon_token() {
250 Some(it) => it,
251 None => return self.clone(),
252 };
253 let end = match self.type_bound_list() {
254 Some(it) => it.syntax().clone().into(),
255 None => colon.clone().into(),
256 };
257 self.replace_children(colon.into()..=end, iter::empty())
258 }
259}
260
261impl ast::TypeParam {
262 #[must_use]
263 pub fn remove_bounds(&self) -> ast::TypeParam {
264 let colon = match self.colon_token() {
265 Some(it) => it,
266 None => return self.clone(),
267 };
268 let end = match self.type_bound_list() {
269 Some(it) => it.syntax().clone().into(),
270 None => colon.clone().into(),
271 };
272 self.replace_children(colon.into()..=end, iter::empty())
273 }
274}
275
276impl ast::Path { 28impl ast::Path {
277 #[must_use] 29 #[must_use]
278 pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { 30 pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path {
@@ -313,33 +65,7 @@ impl ast::PathSegment {
313 } 65 }
314} 66}
315 67
316impl ast::Use {
317 #[must_use]
318 pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use {
319 if let Some(old) = self.use_tree() {
320 return self.replace_descendant(old, use_tree);
321 }
322 self.clone()
323 }
324}
325
326impl ast::UseTree { 68impl ast::UseTree {
327 #[must_use]
328 pub fn with_path(&self, path: ast::Path) -> ast::UseTree {
329 if let Some(old) = self.path() {
330 return self.replace_descendant(old, path);
331 }
332 self.clone()
333 }
334
335 #[must_use]
336 pub fn with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree {
337 if let Some(old) = self.use_tree_list() {
338 return self.replace_descendant(old, use_tree_list);
339 }
340 self.clone()
341 }
342
343 /// Splits off the given prefix, making it the path component of the use tree, appending the rest of the path to all UseTreeList items. 69 /// Splits off the given prefix, making it the path component of the use tree, appending the rest of the path to all UseTreeList items.
344 #[must_use] 70 #[must_use]
345 pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { 71 pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree {
@@ -376,134 +102,6 @@ impl ast::UseTree {
376 } 102 }
377} 103}
378 104
379impl ast::MatchArmList {
380 #[must_use]
381 pub fn append_arms(&self, items: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
382 let mut res = self.clone();
383 res = res.strip_if_only_whitespace();
384 if !res.syntax().text().contains_char('\n') {
385 res = make_multiline(res);
386 }
387 items.into_iter().for_each(|it| res = res.append_arm(it));
388 res
389 }
390
391 fn strip_if_only_whitespace(&self) -> ast::MatchArmList {
392 let mut iter = self.syntax().children_with_tokens().skip_while(|it| it.kind() != T!['{']);
393 iter.next(); // Eat the curly
394 let mut inner = iter.take_while(|it| it.kind() != T!['}']);
395 if !inner.clone().all(|it| it.kind() == WHITESPACE) {
396 return self.clone();
397 }
398 let start = match inner.next() {
399 Some(s) => s,
400 None => return self.clone(),
401 };
402 let end = match inner.last() {
403 Some(s) => s,
404 None => start.clone(),
405 };
406 self.replace_children(start..=end, &mut iter::empty())
407 }
408
409 #[must_use]
410 pub fn remove_placeholder(&self) -> ast::MatchArmList {
411 let placeholder =
412 self.arms().find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
413 if let Some(placeholder) = placeholder {
414 self.remove_arm(&placeholder)
415 } else {
416 self.clone()
417 }
418 }
419
420 #[must_use]
421 fn remove_arm(&self, arm: &ast::MatchArm) -> ast::MatchArmList {
422 let start = arm.syntax().clone();
423 let end = if let Some(comma) = start
424 .siblings_with_tokens(Direction::Next)
425 .skip(1)
426 .find(|it| !it.kind().is_trivia())
427 .filter(|it| it.kind() == T![,])
428 {
429 comma
430 } else {
431 start.clone().into()
432 };
433 self.replace_children(start.into()..=end, None)
434 }
435
436 #[must_use]
437 pub fn append_arm(&self, item: ast::MatchArm) -> ast::MatchArmList {
438 let r_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['}']) {
439 Some(t) => t,
440 None => return self.clone(),
441 };
442 let position = InsertPosition::Before(r_curly);
443 let arm_ws = tokens::WsBuilder::new(" ");
444 let match_indent = &leading_indent(self.syntax()).unwrap_or_default();
445 let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent));
446 let to_insert: ArrayVec<SyntaxElement, 3> =
447 [arm_ws.ws().into(), item.syntax().clone().into(), match_ws.ws().into()].into();
448 self.insert_children(position, to_insert)
449 }
450}
451
452impl ast::GenericParamList {
453 #[must_use]
454 pub fn append_params(
455 &self,
456 params: impl IntoIterator<Item = ast::GenericParam>,
457 ) -> ast::GenericParamList {
458 let mut res = self.clone();
459 params.into_iter().for_each(|it| res = res.append_param(it));
460 res
461 }
462
463 #[must_use]
464 pub fn append_param(&self, item: ast::GenericParam) -> ast::GenericParamList {
465 let space = tokens::single_space();
466
467 let mut to_insert: ArrayVec<SyntaxElement, 4> = ArrayVec::new();
468 if self.generic_params().next().is_some() {
469 to_insert.push(space.into());
470 }
471 to_insert.push(item.syntax().clone().into());
472
473 macro_rules! after_l_angle {
474 () => {{
475 let anchor = match self.l_angle_token() {
476 Some(it) => it.into(),
477 None => return self.clone(),
478 };
479 InsertPosition::After(anchor)
480 }};
481 }
482
483 macro_rules! after_field {
484 ($anchor:expr) => {
485 if let Some(comma) = $anchor
486 .syntax()
487 .siblings_with_tokens(Direction::Next)
488 .find(|it| it.kind() == T![,])
489 {
490 InsertPosition::After(comma)
491 } else {
492 to_insert.insert(0, make::token(T![,]).into());
493 InsertPosition::After($anchor.syntax().clone().into())
494 }
495 };
496 }
497
498 let position = match self.generic_params().last() {
499 Some(it) => after_field!(it),
500 None => after_l_angle!(),
501 };
502
503 self.insert_children(position, to_insert)
504 }
505}
506
507#[must_use] 105#[must_use]
508pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { 106pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
509 N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() 107 N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap()
@@ -554,6 +152,12 @@ impl ops::Add<u8> for IndentLevel {
554} 152}
555 153
556impl IndentLevel { 154impl IndentLevel {
155 pub fn single() -> IndentLevel {
156 IndentLevel(0)
157 }
158 pub fn is_zero(&self) -> bool {
159 self.0 == 0
160 }
557 pub fn from_element(element: &SyntaxElement) -> IndentLevel { 161 pub fn from_element(element: &SyntaxElement) -> IndentLevel {
558 match element { 162 match element {
559 rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it), 163 rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it),
@@ -588,54 +192,40 @@ impl IndentLevel {
588 /// ``` 192 /// ```
589 /// if you indent the block, the `{` token would stay put. 193 /// if you indent the block, the `{` token would stay put.
590 fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { 194 fn increase_indent(self, node: SyntaxNode) -> SyntaxNode {
591 let mut rewriter = SyntaxRewriter::default(); 195 let res = node.clone_subtree().clone_for_update();
592 node.descendants_with_tokens() 196 let tokens = res.preorder_with_tokens().filter_map(|event| match event {
593 .filter_map(|el| el.into_token()) 197 rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
594 .filter_map(ast::Whitespace::cast) 198 _ => None,
595 .filter(|ws| { 199 });
596 let text = ws.syntax().text(); 200 for token in tokens {
597 text.contains('\n') 201 if let Some(ws) = ast::Whitespace::cast(token) {
598 }) 202 if ws.text().contains('\n') {
599 .for_each(|ws| { 203 let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
600 let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,)); 204 ted::replace(ws.syntax(), &new_ws)
601 rewriter.replace(ws.syntax(), &new_ws) 205 }
602 }); 206 }
603 rewriter.rewrite(&node) 207 }
208 res.clone_subtree()
604 } 209 }
605 210
606 fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { 211 fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
607 let mut rewriter = SyntaxRewriter::default(); 212 let res = node.clone_subtree().clone_for_update();
608 node.descendants_with_tokens() 213 let tokens = res.preorder_with_tokens().filter_map(|event| match event {
609 .filter_map(|el| el.into_token()) 214 rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it),
610 .filter_map(ast::Whitespace::cast) 215 _ => None,
611 .filter(|ws| { 216 });
612 let text = ws.syntax().text(); 217 for token in tokens {
613 text.contains('\n') 218 if let Some(ws) = ast::Whitespace::cast(token) {
614 }) 219 if ws.text().contains('\n') {
615 .for_each(|ws| { 220 let new_ws = make::tokens::whitespace(
616 let new_ws = make::tokens::whitespace( 221 &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
617 &ws.syntax().text().replace(&format!("\n{}", self), "\n"), 222 );
618 ); 223 ted::replace(ws.syntax(), &new_ws)
619 rewriter.replace(ws.syntax(), &new_ws) 224 }
620 });
621 rewriter.rewrite(&node)
622 }
623}
624
625// FIXME: replace usages with IndentLevel above
626fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
627 for token in prev_tokens(node.first_token()?) {
628 if let Some(ws) = ast::Whitespace::cast(token.clone()) {
629 let ws_text = ws.text();
630 if let Some(pos) = ws_text.rfind('\n') {
631 return Some(ws_text[pos + 1..].into());
632 } 225 }
633 } 226 }
634 if token.text().contains('\n') { 227 res.clone_subtree()
635 break;
636 }
637 } 228 }
638 None
639} 229}
640 230
641fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { 231fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
@@ -662,13 +252,6 @@ pub trait AstNodeEdit: AstNode + Clone + Sized {
662 let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); 252 let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert);
663 Self::cast(new_syntax).unwrap() 253 Self::cast(new_syntax).unwrap()
664 } 254 }
665
666 #[must_use]
667 fn replace_descendant<D: AstNode>(&self, old: D, new: D) -> Self {
668 let mut rewriter = SyntaxRewriter::default();
669 rewriter.replace(old.syntax(), new.syntax());
670 rewriter.rewrite_ast(self)
671 }
672 fn indent_level(&self) -> IndentLevel { 255 fn indent_level(&self) -> IndentLevel {
673 IndentLevel::from_node(self.syntax()) 256 IndentLevel::from_node(self.syntax())
674 } 257 }
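`increase_indent` and `decrease_indent` now follow the same pattern: clone the node into its own tree, switch it to the mutable mode, patch whitespace tokens in place with `ted::replace`, and detach the result again. A hedged sketch of that pattern, assuming this repo's `syntax` crate (and its `ted` module) as a dependency; the four-space indent is arbitrary and only for illustration:

```rust
use syntax::{ast::{self, make, AstToken}, ted, SyntaxNode};

/// Sketch: add four spaces after every newline inside `node`, using the
/// clone_subtree -> clone_for_update -> ted::replace -> clone_subtree flow.
fn indent_by_four(node: &SyntaxNode) -> SyntaxNode {
    let res = node.clone_subtree().clone_for_update();
    // Collect the whitespace tokens first, then rewrite them in place.
    let whitespace: Vec<_> = res
        .descendants_with_tokens()
        .filter_map(|element| element.into_token())
        .filter_map(ast::Whitespace::cast)
        .filter(|ws| ws.text().contains('\n'))
        .collect();
    for ws in whitespace {
        let new_ws = make::tokens::whitespace(&format!("{}    ", ws.syntax()));
        ted::replace(ws.syntax(), &new_ws);
    }
    res.clone_subtree()
}
```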
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 168355555..2676ed8c9 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -2,13 +2,18 @@
2 2
3use std::iter::empty; 3use std::iter::empty;
4 4
5use parser::T; 5use parser::{SyntaxKind, T};
6use rowan::SyntaxElement;
6 7
7use crate::{ 8use crate::{
8 algo::neighbor, 9 algo::neighbor,
9 ast::{self, edit::AstNodeEdit, make, GenericParamsOwner, WhereClause}, 10 ast::{
11 self,
12 edit::{AstNodeEdit, IndentLevel},
13 make, GenericParamsOwner,
14 },
10 ted::{self, Position}, 15 ted::{self, Position},
11 AstNode, AstToken, Direction, 16 AstNode, AstToken, Direction, SyntaxNode,
12}; 17};
13 18
14use super::NameOwner; 19use super::NameOwner;
@@ -37,7 +42,7 @@ impl GenericParamsOwnerEdit for ast::Fn {
37 } 42 }
38 } 43 }
39 44
40 fn get_or_create_where_clause(&self) -> WhereClause { 45 fn get_or_create_where_clause(&self) -> ast::WhereClause {
41 if self.where_clause().is_none() { 46 if self.where_clause().is_none() {
42 let position = if let Some(ty) = self.ret_type() { 47 let position = if let Some(ty) = self.ret_type() {
43 Position::after(ty.syntax()) 48 Position::after(ty.syntax())
@@ -67,7 +72,7 @@ impl GenericParamsOwnerEdit for ast::Impl {
67 } 72 }
68 } 73 }
69 74
70 fn get_or_create_where_clause(&self) -> WhereClause { 75 fn get_or_create_where_clause(&self) -> ast::WhereClause {
71 if self.where_clause().is_none() { 76 if self.where_clause().is_none() {
72 let position = if let Some(items) = self.assoc_item_list() { 77 let position = if let Some(items) = self.assoc_item_list() {
73 Position::before(items.syntax()) 78 Position::before(items.syntax())
@@ -97,7 +102,7 @@ impl GenericParamsOwnerEdit for ast::Trait {
97 } 102 }
98 } 103 }
99 104
100 fn get_or_create_where_clause(&self) -> WhereClause { 105 fn get_or_create_where_clause(&self) -> ast::WhereClause {
101 if self.where_clause().is_none() { 106 if self.where_clause().is_none() {
102 let position = if let Some(items) = self.assoc_item_list() { 107 let position = if let Some(items) = self.assoc_item_list() {
103 Position::before(items.syntax()) 108 Position::before(items.syntax())
@@ -127,7 +132,7 @@ impl GenericParamsOwnerEdit for ast::Struct {
127 } 132 }
128 } 133 }
129 134
130 fn get_or_create_where_clause(&self) -> WhereClause { 135 fn get_or_create_where_clause(&self) -> ast::WhereClause {
131 if self.where_clause().is_none() { 136 if self.where_clause().is_none() {
132 let tfl = self.field_list().and_then(|fl| match fl { 137 let tfl = self.field_list().and_then(|fl| match fl {
133 ast::FieldList::RecordFieldList(_) => None, 138 ast::FieldList::RecordFieldList(_) => None,
@@ -165,7 +170,7 @@ impl GenericParamsOwnerEdit for ast::Enum {
165 } 170 }
166 } 171 }
167 172
168 fn get_or_create_where_clause(&self) -> WhereClause { 173 fn get_or_create_where_clause(&self) -> ast::WhereClause {
169 if self.where_clause().is_none() { 174 if self.where_clause().is_none() {
170 let position = if let Some(gpl) = self.generic_param_list() { 175 let position = if let Some(gpl) = self.generic_param_list() {
171 Position::after(gpl.syntax()) 176 Position::after(gpl.syntax())
@@ -272,6 +277,167 @@ impl ast::Use {
272 } 277 }
273} 278}
274 279
280impl ast::Impl {
281 pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
282 if self.assoc_item_list().is_none() {
283 let assoc_item_list = make::assoc_item_list().clone_for_update();
284 ted::append_child(self.syntax(), assoc_item_list.syntax());
285 }
286 self.assoc_item_list().unwrap()
287 }
288}
289
290impl ast::AssocItemList {
291 pub fn add_item(&self, item: ast::AssocItem) {
292 let (indent, position, whitespace) = match self.assoc_items().last() {
293 Some(last_item) => (
294 IndentLevel::from_node(last_item.syntax()),
295 Position::after(last_item.syntax()),
296 "\n\n",
297 ),
298 None => match self.l_curly_token() {
299 Some(l_curly) => {
300 normalize_ws_between_braces(self.syntax());
301 (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
302 }
303 None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
304 },
305 };
306 let elements: Vec<SyntaxElement<_>> = vec![
307 make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
308 item.syntax().clone().into(),
309 ];
310 ted::insert_all(position, elements);
311 }
312}
313
314impl ast::Fn {
315 pub fn get_or_create_body(&self) -> ast::BlockExpr {
316 if self.body().is_none() {
317 let body = make::ext::empty_block_expr().clone_for_update();
318 match self.semicolon_token() {
319 Some(semi) => {
320 ted::replace(semi, body.syntax());
321 ted::insert(Position::before(body.syntax), make::tokens::single_space());
322 }
323 None => ted::append_child(self.syntax(), body.syntax()),
324 }
325 }
326 self.body().unwrap()
327 }
328}
329
330impl ast::MatchArm {
331 pub fn remove(&self) {
332 if let Some(sibling) = self.syntax().prev_sibling_or_token() {
333 if sibling.kind() == SyntaxKind::WHITESPACE {
334 ted::remove(sibling);
335 }
336 }
337 if let Some(sibling) = self.syntax().next_sibling_or_token() {
338 if sibling.kind() == T![,] {
339 ted::remove(sibling);
340 }
341 }
342 ted::remove(self.syntax());
343 }
344}
345
346impl ast::MatchArmList {
347 pub fn add_arm(&self, arm: ast::MatchArm) {
348 normalize_ws_between_braces(self.syntax());
349 let position = match self.arms().last() {
350 Some(last_arm) => {
351 let curly = last_arm
352 .syntax()
353 .siblings_with_tokens(Direction::Next)
354 .find(|it| it.kind() == T![,]);
355 Position::after(curly.unwrap_or_else(|| last_arm.syntax().clone().into()))
356 }
357 None => match self.l_curly_token() {
358 Some(it) => Position::after(it),
359 None => Position::last_child_of(self.syntax()),
360 },
361 };
362 let indent = IndentLevel::from_node(self.syntax()) + 1;
363 let elements = vec![
364 make::tokens::whitespace(&format!("\n{}", indent)).into(),
365 arm.syntax().clone().into(),
366 ];
367 ted::insert_all(position, elements);
368 }
369}
370
371impl ast::RecordExprFieldList {
372 pub fn add_field(&self, field: ast::RecordExprField) {
373 let is_multiline = self.syntax().text().contains_char('\n');
374 let whitespace = if is_multiline {
375 let indent = IndentLevel::from_node(self.syntax()) + 1;
376 make::tokens::whitespace(&format!("\n{}", indent))
377 } else {
378 make::tokens::single_space()
379 };
380
381 if is_multiline {
382 normalize_ws_between_braces(self.syntax());
383 }
384
385 let position = match self.fields().last() {
386 Some(last_field) => {
387 let comma = match last_field
388 .syntax()
389 .siblings_with_tokens(Direction::Next)
390 .filter_map(|it| it.into_token())
391 .find(|it| it.kind() == T![,])
392 {
393 Some(it) => it,
394 None => {
395 let comma = ast::make::token(T![,]);
396 ted::insert(Position::after(last_field.syntax()), &comma);
397 comma
398 }
399 };
400 Position::after(comma)
401 }
402 None => match self.l_curly_token() {
403 Some(it) => Position::after(it),
404 None => Position::last_child_of(self.syntax()),
405 },
406 };
407
408 ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
409 if is_multiline {
410 ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
411 }
412 }
413}
414
415fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
416 let l = node
417 .children_with_tokens()
418 .filter_map(|it| it.into_token())
419 .find(|it| it.kind() == T!['{'])?;
420 let r = node
421 .children_with_tokens()
422 .filter_map(|it| it.into_token())
423 .find(|it| it.kind() == T!['}'])?;
424
425 let indent = IndentLevel::from_node(node);
426
427 match l.next_sibling_or_token() {
428 Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
429 if ws.next_sibling_or_token()?.into_token()? == r {
430 ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
431 }
432 }
433 Some(ws) if ws.kind() == T!['}'] => {
434 ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
435 }
436 _ => (),
437 }
438 Some(())
439}
440
275#[cfg(test)] 441#[cfg(test)]
276mod tests { 442mod tests {
277 use std::fmt; 443 use std::fmt;
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 1998ad1f6..d13926ded 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -12,7 +12,7 @@
12use itertools::Itertools; 12use itertools::Itertools;
13use stdx::{format_to, never}; 13use stdx::{format_to, never};
14 14
15use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; 15use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
16 16
17/// While the parent module defines basic atomic "constructors", the `ext` 17/// While the parent module defines basic atomic "constructors", the `ext`
18/// module defines shortcuts for common things. 18/// module defines shortcuts for common things.
@@ -99,7 +99,7 @@ fn ty_from_text(text: &str) -> ast::Type {
99} 99}
100 100
101pub fn assoc_item_list() -> ast::AssocItemList { 101pub fn assoc_item_list() -> ast::AssocItemList {
102 ast_from_text("impl C for D {};") 102 ast_from_text("impl C for D {}")
103} 103}
104 104
105pub fn impl_trait(trait_: ast::Path, ty: ast::Path) -> ast::Impl { 105pub fn impl_trait(trait_: ast::Path, ty: ast::Path) -> ast::Impl {
@@ -601,17 +601,11 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
601 panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text) 601 panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
602 } 602 }
603 }; 603 };
604 let node = node.syntax().clone(); 604 let node = node.clone_subtree();
605 let node = unroot(node);
606 let node = N::cast(node).unwrap();
607 assert_eq!(node.syntax().text_range().start(), 0.into()); 605 assert_eq!(node.syntax().text_range().start(), 0.into());
608 node 606 node
609} 607}
610 608
611fn unroot(n: SyntaxNode) -> SyntaxNode {
612 SyntaxNode::new_root(n.green().into())
613}
614
615pub fn token(kind: SyntaxKind) -> SyntaxToken { 609pub fn token(kind: SyntaxKind) -> SyntaxToken {
616 tokens::SOURCE_FILE 610 tokens::SOURCE_FILE
617 .tree() 611 .tree()
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 29d25a58a..4b1e1ccee 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -143,6 +143,30 @@ impl QuoteOffsets {
143 } 143 }
144} 144}
145 145
146pub trait IsString: AstToken {
147 fn quote_offsets(&self) -> Option<QuoteOffsets> {
148 let text = self.text();
149 let offsets = QuoteOffsets::new(text)?;
150 let o = self.syntax().text_range().start();
151 let offsets = QuoteOffsets {
152 quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
153 contents: offsets.contents + o,
154 };
155 Some(offsets)
156 }
157 fn text_range_between_quotes(&self) -> Option<TextRange> {
158 self.quote_offsets().map(|it| it.contents)
159 }
160 fn open_quote_text_range(&self) -> Option<TextRange> {
161 self.quote_offsets().map(|it| it.quotes.0)
162 }
163 fn close_quote_text_range(&self) -> Option<TextRange> {
164 self.quote_offsets().map(|it| it.quotes.1)
165 }
166}
167
168impl IsString for ast::String {}
169
146impl ast::String { 170impl ast::String {
147 pub fn is_raw(&self) -> bool { 171 pub fn is_raw(&self) -> bool {
148 self.text().starts_with('r') 172 self.text().starts_with('r')
@@ -187,32 +211,49 @@ impl ast::String {
187 (false, false) => Some(Cow::Owned(buf)), 211 (false, false) => Some(Cow::Owned(buf)),
188 } 212 }
189 } 213 }
190
191 pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
192 let text = self.text();
193 let offsets = QuoteOffsets::new(text)?;
194 let o = self.syntax().text_range().start();
195 let offsets = QuoteOffsets {
196 quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
197 contents: offsets.contents + o,
198 };
199 Some(offsets)
200 }
201 pub fn text_range_between_quotes(&self) -> Option<TextRange> {
202 self.quote_offsets().map(|it| it.contents)
203 }
204 pub fn open_quote_text_range(&self) -> Option<TextRange> {
205 self.quote_offsets().map(|it| it.quotes.0)
206 }
207 pub fn close_quote_text_range(&self) -> Option<TextRange> {
208 self.quote_offsets().map(|it| it.quotes.1)
209 }
210} 214}
211 215
216impl IsString for ast::ByteString {}
217
212impl ast::ByteString { 218impl ast::ByteString {
213 pub fn is_raw(&self) -> bool { 219 pub fn is_raw(&self) -> bool {
214 self.text().starts_with("br") 220 self.text().starts_with("br")
215 } 221 }
222
223 pub fn value(&self) -> Option<Cow<'_, [u8]>> {
224 if self.is_raw() {
225 let text = self.text();
226 let text =
227 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
228 return Some(Cow::Borrowed(text.as_bytes()));
229 }
230
231 let text = self.text();
232 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
233
234 let mut buf: Vec<u8> = Vec::new();
235 let mut text_iter = text.chars();
236 let mut has_error = false;
237 unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
238 unescaped_char,
239 buf.capacity() == 0,
240 ) {
241 (Ok(c), false) => buf.push(c as u8),
242 (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
243 (Ok(c), true) => {
244 buf.reserve_exact(text.len());
245 buf.extend_from_slice(&text[..char_range.start].as_bytes());
246 buf.push(c as u8);
247 }
248 (Err(_), _) => has_error = true,
249 });
250
251 match (has_error, buf.capacity() == 0) {
252 (true, _) => None,
253 (false, true) => Some(Cow::Borrowed(text.as_bytes())),
254 (false, false) => Some(Cow::Owned(buf)),
255 }
256 }
216} 257}
217 258
218#[derive(Debug)] 259#[derive(Debug)]
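Moving the quote-range helpers into the `IsString` trait lets `ast::ByteString::value` reuse the same offset logic as `ast::String`. A hedged sketch of calling the trait, assuming it is re-exported from the `ast` module of this repo's `syntax` crate:

```rust
use syntax::{ast::{self, AstToken, IsString}, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse(r#"fn f() { "hello"; }"#).tree();
    let string = file
        .syntax()
        .descendants_with_tokens()
        .filter_map(|element| element.into_token())
        .find_map(ast::String::cast)
        .unwrap();

    // The contents range excludes the two quote characters.
    let contents = string.text_range_between_quotes().unwrap();
    assert_eq!(u32::from(contents.len()), "hello".len() as u32);
}
```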
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
index 333bde54a..431ed0699 100644
--- a/crates/syntax/src/parsing.rs
+++ b/crates/syntax/src/parsing.rs
@@ -6,14 +6,13 @@ mod text_token_source;
6mod text_tree_sink; 6mod text_tree_sink;
7mod reparsing; 7mod reparsing;
8 8
9use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; 9use parser::SyntaxKind;
10use text_token_source::TextTokenSource; 10use text_token_source::TextTokenSource;
11use text_tree_sink::TextTreeSink; 11use text_tree_sink::TextTreeSink;
12 12
13pub(crate) use lexer::*; 13use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
14 14
15pub(crate) use self::reparsing::incremental_reparse; 15pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
16use parser::SyntaxKind;
17 16
18pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { 17pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
19 let (tokens, lexer_errors) = tokenize(&text); 18 let (tokens, lexer_errors) = tokenize(&text);
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index 1934204ea..d63ec080b 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -147,8 +147,8 @@ fn n_attached_trivias<'a>(
147 trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, 147 trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
148) -> usize { 148) -> usize {
149 match kind { 149 match kind {
150 MACRO_CALL | MACRO_RULES | MACRO_DEF | CONST | TYPE_ALIAS | STRUCT | UNION | ENUM 150 CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD
151 | VARIANT | FN | TRAIT | MODULE | RECORD_FIELD | STATIC | USE => { 151 | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => {
152 let mut res = 0; 152 let mut res = 0;
153 let mut trivias = trivias.enumerate().peekable(); 153 let mut trivias = trivias.enumerate().peekable();
154 154
diff --git a/crates/syntax/test_data/parser/ok/0045_block_attrs.rast b/crates/syntax/test_data/parser/ok/0045_block_attrs.rast
index 50ab52d32..5e50b4e0b 100644
--- a/crates/syntax/test_data/parser/ok/0045_block_attrs.rast
+++ b/crates/syntax/test_data/parser/ok/0045_block_attrs.rast
@@ -127,9 +127,9 @@ [email protected]
127 WHITESPACE@537..538 "\n" 127 WHITESPACE@537..538 "\n"
128 R_CURLY@538..539 "}" 128 R_CURLY@538..539 "}"
129 WHITESPACE@539..541 "\n\n" 129 WHITESPACE@539..541 "\n\n"
130 COMMENT@541..601 "// https://github.com ..." 130 IMPL@541..763
131 WHITESPACE@601..602 "\n" 131 COMMENT@541..601 "// https://github.com ..."
132 IMPL@602..763 132 WHITESPACE@601..602 "\n"
133 IMPL_KW@602..606 "impl" 133 IMPL_KW@602..606 "impl"
134 WHITESPACE@606..607 " " 134 WHITESPACE@606..607 " "
135 [email protected] 135 [email protected]
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index 8fcd72d5d..3c4eacfeb 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
1<!--- 1<!---
2lsp_ext.rs hash: 6e57fc1b345b00e9 2lsp_ext.rs hash: 10a8988e6893e6b2
3 3
4If you need to change the above hash to make the test pass, please check if you 4If you need to change the above hash to make the test pass, please check if you
5need to adjust this doc as well and ping this issue: 5need to adjust this doc as well and ping this issue:
@@ -650,3 +650,37 @@ export const enum Direction {
650 Down = "Down" 650 Down = "Down"
651} 651}
652``` 652```
653
654## Workspace Symbols Filtering
655
656**Issue:** https://github.com/rust-analyzer/rust-analyzer/pull/7698
657
658**Experimental Server Capability:** `{ "workspaceSymbolScopeKindFiltering": boolean }`
659
660Extends the existing `workspace/symbol` request with the ability to filter symbols by broad scope and kind of symbol.
661If this capability is set, `workspace/symbol` parameter gains two new optional fields:
662
663
664```typescript
665interface WorkspaceSymbolParams {
666 /**
667 * Return only the symbols defined in the specified scope.
668 */
669 searchScope?: WorkspaceSymbolSearchScope;
670 /**
671 * Return only the symbols of specified kinds.
672 */
673 searchKind?: WorkspaceSymbolSearchKind;
674 ...
675}
676
677const enum WorkspaceSymbolSearchScope {
678 Workspace = "workspace",
679 WorkspaceAndDependencies = "workspaceAndDependencies"
680}
681
682const enum WorkspaceSymbolSearchKind {
683 OnlyTypes = "onlyTypes",
684 AllSymbols = "allSymbols"
685}
686```
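On the wire these are just two extra, optional fields on the standard `workspace/symbol` params. A hedged illustration of what a client request body could look like, built with `serde_json` (field names follow the TypeScript above; the query and values are examples, not defaults):

```rust
use serde_json::json;

fn main() {
    // Hypothetical params for a `workspace/symbol` request that searches
    // types only, across the workspace and its dependencies.
    let params = json!({
        "query": "Foo",
        "searchScope": "workspaceAndDependencies",
        "searchKind": "onlyTypes",
    });
    println!("{}", serde_json::to_string_pretty(&params).unwrap());
}
```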
diff --git a/docs/dev/style.md b/docs/dev/style.md
index d24a5952e..96dd684b3 100644
--- a/docs/dev/style.md
+++ b/docs/dev/style.md
@@ -636,6 +636,10 @@ use crate::{}
636 636
637// Finally, parent and child modules, but prefer `use crate::`. 637// Finally, parent and child modules, but prefer `use crate::`.
638use super::{} 638use super::{}
639
640// Re-exports are treated as item definitions rather than imports, so they go
641// after imports and modules. Use them sparingly.
642pub use crate::x::Z;
639``` 643```
640 644
641**Rationale:** consistency. 645**Rationale:** consistency.
@@ -694,6 +698,9 @@ Avoid local `use MyEnum::*` imports.
694Prefer `use crate::foo::bar` to `use super::bar` or `use self::bar::baz`. 698Prefer `use crate::foo::bar` to `use super::bar` or `use self::bar::baz`.
695**Rationale:** consistency, this is the style which works in all cases. 699**Rationale:** consistency, this is the style which works in all cases.
696 700
701By default, avoid re-exports.
702**Rationale:** for non-library code, re-exports introduce two ways to use something and allow for inconsistency.
703
697## Order of Items 704## Order of Items
698 705
699Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on. 706Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on.
@@ -784,13 +791,14 @@ Many names in rust-analyzer conflict with keywords.
784We use mangled names instead of `r#ident` syntax: 791We use mangled names instead of `r#ident` syntax:
785 792
786``` 793```
787struct -> strukt
788crate -> krate 794crate -> krate
789impl -> imp
790trait -> trait_
791fn -> func
792enum -> enum_ 795enum -> enum_
796fn -> func
797impl -> imp
793mod -> module 798mod -> module
799struct -> strukt
800trait -> trait_
801type -> ty
794``` 802```
795 803
796**Rationale:** consistency. 804**Rationale:** consistency.
@@ -944,6 +952,28 @@ match p.current() {
944 952
945## Documentation 953## Documentation
946 954
955Style inline code comments as proper sentences.
956Start with a capital letter, end with a dot.
957
958```rust
959// GOOD
960
961// Only simple single segment paths are allowed.
962MergeBehavior::Last => {
963 tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
964}
965
966// BAD
967
968// only simple single segment paths are allowed
969MergeBehavior::Last => {
970 tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1)
971}
972```
973
974**Rationale:** writing a sentence (or maybe even a paragraph) rather than just "a comment" creates a more appropriate frame of mind.
975It tricks you into writing down more of the context you keep in your head while coding.
976
947For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines. 977For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines.
948If the line is too long, you want to split the sentence in two :-) 978If the line is too long, you want to split the sentence in two :-)
949 979
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index f70558200..b32411887 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -179,6 +179,15 @@ Controls file watching implementation.
179-- 179--
180These directories will be ignored by rust-analyzer. 180These directories will be ignored by rust-analyzer.
181-- 181--
182[[rust-analyzer.highlighting.strings]]rust-analyzer.highlighting.strings (default: `true`)::
183+
184--
185Use semantic tokens for strings.
186
187In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
188By disabling semantic tokens for strings, other grammars can be used to highlight
189their contents.
190--
182[[rust-analyzer.hoverActions.debug]]rust-analyzer.hoverActions.debug (default: `true`):: 191[[rust-analyzer.hoverActions.debug]]rust-analyzer.hoverActions.debug (default: `true`)::
183+ 192+
184-- 193--
@@ -332,3 +341,13 @@ Additional arguments to `rustfmt`.
332Advanced option, fully override the command rust-analyzer uses for 341Advanced option, fully override the command rust-analyzer uses for
333formatting. 342formatting.
334-- 343--
344[[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`)::
345+
346--
347Workspace symbol search scope.
348--
349[[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`)::
350+
351--
352Workspace symbol search kind.
353--
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 797af3f75..f96c09a79 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -589,6 +589,7 @@ Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrar
589 589
590* proc macros and build scripts are executed by default 590* proc macros and build scripts are executed by default
591* `.cargo/config` can override `rustc` with an arbitrary executable 591* `.cargo/config` can override `rustc` with an arbitrary executable
592* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
592* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself. 593* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
593* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety. 594* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
594 595
diff --git a/editors/code/package.json b/editors/code/package.json
index 0f38a1673..99223c4e8 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -613,6 +613,11 @@
613 "type": "string" 613 "type": "string"
614 } 614 }
615 }, 615 },
616 "rust-analyzer.highlighting.strings": {
617 "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
618 "default": true,
619 "type": "boolean"
620 },
616 "rust-analyzer.hoverActions.debug": { 621 "rust-analyzer.hoverActions.debug": {
617 "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hoverActions.enable#` is set.", 622 "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hoverActions.enable#` is set.",
618 "default": true, 623 "default": true,
@@ -778,6 +783,32 @@
778 "type": "string" 783 "type": "string"
779 } 784 }
780 }, 785 },
786 "rust-analyzer.workspace.symbol.search.scope": {
787 "markdownDescription": "Workspace symbol search scope.",
788 "default": "workspace",
789 "type": "string",
790 "enum": [
791 "workspace",
792 "workspace_and_dependencies"
793 ],
794 "enumDescriptions": [
795 "Search in current workspace only",
796 "Search in current workspace and dependencies"
797 ]
798 },
799 "rust-analyzer.workspace.symbol.search.kind": {
800 "markdownDescription": "Workspace symbol search kind.",
801 "default": "only_types",
802 "type": "string",
803 "enum": [
804 "only_types",
805 "all_symbols"
806 ],
807 "enumDescriptions": [
808 "Search for types only",
809 "Search for all symbols kinds"
810 ]
811 },
781 "$generated-end": false 812 "$generated-end": false
782 } 813 }
783 }, 814 },
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index c3c785eff..6c55823eb 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -347,9 +347,8 @@ struct TidyDocs {
347 347
348impl TidyDocs { 348impl TidyDocs {
349 fn visit(&mut self, path: &Path, text: &str) { 349 fn visit(&mut self, path: &Path, text: &str) {
350 // Test hopefully don't really need comments, and for assists we already 350 // Tests and diagnostic fixes don't need module level comments.
351 // have special comments which are source of doc tests and user docs. 351 if is_exclude_dir(path, &["tests", "test_data", "fixes"]) {
352 if is_exclude_dir(path, &["tests", "test_data"]) {
353 return; 352 return;
354 } 353 }
355 354