Diffstat (limited to 'crates')
-rw-r--r--  crates/flycheck/Cargo.toml | 4
-rw-r--r--  crates/flycheck/src/lib.rs | 5
-rw-r--r--  crates/hir/src/code_model.rs | 19
-rw-r--r--  crates/hir/src/lib.rs | 11
-rw-r--r--  crates/hir_def/src/attr.rs | 2
-rw-r--r--  crates/hir_def/src/find_path.rs | 69
-rw-r--r--  crates/hir_def/src/item_scope.rs | 22
-rw-r--r--  crates/hir_def/src/lib.rs | 23
-rw-r--r--  crates/hir_def/src/nameres.rs | 12
-rw-r--r--  crates/hir_def/src/path/lower.rs | 13
-rw-r--r--  crates/hir_ty/Cargo.toml | 6
-rw-r--r--  crates/hir_ty/src/diagnostics/expr.rs | 14
-rw-r--r--  crates/hir_ty/src/diagnostics/match_check.rs | 6
-rw-r--r--  crates/hir_ty/src/display.rs | 40
-rw-r--r--  crates/hir_ty/src/infer.rs | 5
-rw-r--r--  crates/hir_ty/src/infer/coerce.rs | 14
-rw-r--r--  crates/hir_ty/src/infer/expr.rs | 39
-rw-r--r--  crates/hir_ty/src/infer/pat.rs | 13
-rw-r--r--  crates/hir_ty/src/lib.rs | 24
-rw-r--r--  crates/hir_ty/src/lower.rs | 14
-rw-r--r--  crates/hir_ty/src/method_resolution.rs | 19
-rw-r--r--  crates/hir_ty/src/traits/chalk.rs | 14
-rw-r--r--  crates/hir_ty/src/traits/chalk/mapping.rs | 44
-rw-r--r--  crates/ide/src/folding_ranges.rs | 55
-rw-r--r--  crates/ide/src/hover.rs | 164
-rw-r--r--  crates/ide/src/join_lines.rs | 74
-rw-r--r--  crates/ide/src/references/rename.rs | 378
-rw-r--r--  crates/ide/src/runnables.rs | 2
-rw-r--r--  crates/ide_assists/src/handlers/add_turbo_fish.rs | 108
-rw-r--r--  crates/ide_assists/src/handlers/apply_demorgan.rs | 45
-rw-r--r--  crates/ide_assists/src/handlers/convert_comment_block.rs | 419
-rw-r--r--  crates/ide_assists/src/handlers/extract_variable.rs | 208
-rw-r--r--  crates/ide_assists/src/lib.rs | 2
-rw-r--r--  crates/ide_assists/src/utils.rs | 2
-rw-r--r--  crates/ide_assists/src/utils/suggest_name.rs | 729
-rw-r--r--  crates/ide_completion/src/completions/attribute.rs | 3
-rw-r--r--  crates/ide_completion/src/completions/fn_param.rs | 27
-rw-r--r--  crates/ide_completion/src/completions/keyword.rs | 35
-rw-r--r--  crates/ide_completion/src/completions/qualified_path.rs | 12
-rw-r--r--  crates/ide_db/src/helpers.rs | 4
-rw-r--r--  crates/ide_db/src/helpers/famous_defs_fixture.rs | 8
-rw-r--r--  crates/mbe/Cargo.toml | 8
-rw-r--r--  crates/mbe/src/benchmark.rs | 40
-rw-r--r--  crates/mbe/src/expander.rs | 16
-rw-r--r--  crates/mbe/src/expander/matcher.rs | 559
-rw-r--r--  crates/mbe/src/expander/transcriber.rs | 12
-rw-r--r--  crates/mbe/src/lib.rs | 11
-rw-r--r--  crates/mbe/src/parser.rs | 80
-rw-r--r--  crates/mbe/src/tests.rs | 23
-rw-r--r--  crates/proc_macro_srv/Cargo.toml | 2
-rw-r--r--  crates/proc_macro_srv/src/rustc_server.rs | 40
-rw-r--r--  crates/proc_macro_srv/src/tests/utils.rs | 3
-rw-r--r--  crates/project_model/Cargo.toml | 10
-rw-r--r--  crates/project_model/src/build_data.rs | 23
-rw-r--r--  crates/project_model/src/cargo_workspace.rs | 9
-rw-r--r--  crates/rust-analyzer/Cargo.toml | 2
-rw-r--r--  crates/rust-analyzer/src/bin/args.rs | 274
-rw-r--r--  crates/rust-analyzer/src/bin/flags.rs | 251
-rw-r--r--  crates/rust-analyzer/src/bin/main.rs | 85
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_bench.rs | 3
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_stats.rs | 2
-rw-r--r--  crates/rust-analyzer/src/cli/diagnostics.rs | 3
-rw-r--r--  crates/rust-analyzer/src/cli/load_cargo.rs | 9
-rw-r--r--  crates/rust-analyzer/src/cli/ssr.rs | 4
-rw-r--r--  crates/rust-analyzer/src/config.rs | 11
-rw-r--r--  crates/rust-analyzer/src/diagnostics/to_proto.rs | 2
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs | 2
-rw-r--r--  crates/rust-analyzer/src/reload.rs | 2
-rw-r--r--  crates/rust-analyzer/src/to_proto.rs | 2
-rw-r--r--  crates/syntax/Cargo.toml | 2
-rw-r--r--  crates/syntax/src/ast/edit.rs | 11
-rw-r--r--  crates/syntax/src/ast/token_ext.rs | 3
72 files changed, 3238 insertions, 973 deletions
diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml
index 1d19c7886..2a1a21b28 100644
--- a/crates/flycheck/Cargo.toml
+++ b/crates/flycheck/Cargo.toml
@@ -12,9 +12,9 @@ doctest = false
 [dependencies]
 crossbeam-channel = "0.5.0"
 log = "0.4.8"
-cargo_metadata = "0.12.2"
+cargo_metadata = "0.13"
 serde_json = "1.0.48"
 jod-thread = "0.1.1"
 
 toolchain = { path = "../toolchain", version = "0.0.0" }
 stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index e04208006..e2a59497a 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -194,7 +194,7 @@ impl FlycheckActor {
                     cargo_metadata::Message::BuildScriptExecuted(_)
                     | cargo_metadata::Message::BuildFinished(_)
                     | cargo_metadata::Message::TextLine(_)
-                    | cargo_metadata::Message::Unknown => {}
+                    | _ => {}
                 },
             }
         }
@@ -329,8 +329,7 @@ impl CargoActor {
             // Skip certain kinds of messages to only spend time on what's useful
             match &message {
                 cargo_metadata::Message::CompilerArtifact(artifact) if artifact.fresh => (),
-                cargo_metadata::Message::BuildScriptExecuted(_)
-                | cargo_metadata::Message::Unknown => (),
+                cargo_metadata::Message::BuildScriptExecuted(_) => (),
                 _ => self.sender.send(message).unwrap(),
             }
         }
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 00b0dc082..7656db974 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -14,7 +14,7 @@ use hir_def::{
     per_ns::PerNs,
     resolver::{HasResolver, Resolver},
     src::HasSource as _,
-    type_ref::{Mutability, TypeRef},
+    type_ref::TypeRef,
     AdtId, AssocContainerId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId,
     DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, ImplId, LifetimeParamId,
     LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
@@ -32,8 +32,8 @@ use hir_ty::{
     method_resolution,
     traits::{FnTrait, Solution, SolutionVariables},
     AliasTy, BoundVar, CallableDefId, CallableSig, Canonical, DebruijnIndex, GenericPredicate,
-    InEnvironment, Obligation, ProjectionPredicate, ProjectionTy, Scalar, Substs, TraitEnvironment,
-    Ty, TyDefId, TyVariableKind,
+    InEnvironment, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Scalar, Substs,
+    TraitEnvironment, Ty, TyDefId, TyVariableKind,
 };
 use rustc_hash::FxHashSet;
 use stdx::{format_to, impl_from};
@@ -836,7 +836,7 @@ pub enum Access {
 impl From<Mutability> for Access {
     fn from(mutability: Mutability) -> Access {
         match mutability {
-            Mutability::Shared => Access::Shared,
+            Mutability::Not => Access::Shared,
             Mutability::Mut => Access::Exclusive,
         }
     }
@@ -865,7 +865,10 @@ impl SelfParam {
             .params
             .first()
             .map(|param| match *param {
-                TypeRef::Reference(.., mutability) => mutability.into(),
+                TypeRef::Reference(.., mutability) => match mutability {
+                    hir_def::type_ref::Mutability::Shared => Access::Shared,
+                    hir_def::type_ref::Mutability::Mut => Access::Exclusive,
+                },
                 _ => Access::Owned,
             })
             .unwrap_or(Access::Owned)
@@ -1697,7 +1700,7 @@ impl Type {
 
     pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
         let adt_id = match self.ty.value {
-            Ty::Adt(adt_id, ..) => adt_id,
+            Ty::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
             _ => return false,
         };
 
@@ -1725,8 +1728,8 @@ impl Type {
 
     pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
         let (variant_id, substs) = match self.ty.value {
-            Ty::Adt(AdtId::StructId(s), ref substs) => (s.into(), substs),
-            Ty::Adt(AdtId::UnionId(u), ref substs) => (u.into(), substs),
+            Ty::Adt(hir_ty::AdtId(AdtId::StructId(s)), ref substs) => (s.into(), substs),
+            Ty::Adt(hir_ty::AdtId(AdtId::UnionId(u)), ref substs) => (u.into(), substs),
             _ => return Vec::new(),
         };
 
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 769945c47..69fcdab07 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -33,11 +33,11 @@ mod has_source;
 pub use crate::{
     attrs::{HasAttrs, Namespace},
     code_model::{
-        Access, Adt, AsAssocItem, AssocItem, AssocItemContainer, Callable, CallableKind, Const,
-        ConstParam, Crate, CrateDependency, DefWithBody, Enum, Field, FieldSource, Function,
-        GenericDef, GenericParam, HasVisibility, Impl, Label, LifetimeParam, Local, MacroDef,
-        Module, ModuleDef, ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, Union,
-        Variant, VariantDef,
+        Access, Adt, AsAssocItem, AssocItem, AssocItemContainer, BuiltinType, Callable,
+        CallableKind, Const, ConstParam, Crate, CrateDependency, DefWithBody, Enum, Field,
+        FieldSource, Function, GenericDef, GenericParam, HasVisibility, Impl, Label, LifetimeParam,
+        Local, MacroDef, Module, ModuleDef, ScopeDef, Static, Struct, Trait, Type, TypeAlias,
+        TypeParam, Union, Variant, VariantDef,
     },
     has_source::HasSource,
     semantics::{PathResolution, Semantics, SemanticsScope},
@@ -47,7 +47,6 @@ pub use hir_def::{
     adt::StructKind,
     attr::{Attrs, Documentation},
     body::scope::ExprScopes,
-    builtin_type::BuiltinType,
     find_path::PrefixKind,
     import_map,
     item_scope::ItemInNs,
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index fe4c3fa28..24ffa6c3a 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -367,7 +367,7 @@ fn inner_attributes(
             // Excerpt from the reference:
             // Block expressions accept outer and inner attributes, but only when they are the outer
             // expression of an expression statement or the final expression of another block expression.
-            ast::BlockExpr(it) => return None,
+            ast::BlockExpr(_it) => return None,
             _ => return None,
         }
     };
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index 5e2a711b8..3a98ffbaa 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -1,5 +1,7 @@
 //! An algorithm to find a path to refer to a certain item.
 
+use std::iter;
+
 use hir_expand::name::{known, AsName, Name};
 use rustc_hash::FxHashSet;
 use test_utils::mark;
@@ -95,7 +97,7 @@ fn find_path_inner(
     item: ItemInNs,
     from: ModuleId,
     max_len: usize,
-    prefixed: Option<PrefixKind>,
+    mut prefixed: Option<PrefixKind>,
 ) -> Option<ModPath> {
     if max_len == 0 {
         return None;
@@ -114,8 +116,9 @@ fn find_path_inner(
     }
 
     // - if the item is the crate root, return `crate`
-    let root = def_map.module_id(def_map.root());
+    let root = def_map.crate_root(db);
     if item == ItemInNs::Types(ModuleDefId::ModuleId(root)) && def_map.block_id().is_none() {
+        // FIXME: the `block_id()` check should be unnecessary, but affects the result
         return Some(ModPath::from_segments(PathKind::Crate, Vec::new()));
     }
 
@@ -165,7 +168,7 @@ fn find_path_inner(
 
     // - otherwise, look for modules containing (reexporting) it and import it from one of those
 
-    let crate_root = def_map.module_id(def_map.root());
+    let crate_root = def_map.crate_root(db);
     let crate_attrs = db.attrs(crate_root.into());
     let prefer_no_std = crate_attrs.by_key("no_std").exists();
     let mut best_path = None;
@@ -228,12 +231,16 @@ fn find_path_inner(
         }
     }
 
-    if let Some(mut prefix) = prefixed.map(PrefixKind::prefix) {
-        if matches!(prefix, PathKind::Crate | PathKind::Super(0)) && def_map.block_id().is_some() {
-            // Inner items cannot be referred to via `crate::` or `self::` paths.
-            prefix = PathKind::Plain;
+    // If the item is declared inside a block expression, don't use a prefix, as we don't handle
+    // that correctly (FIXME).
+    if let Some(item_module) = item.as_module_def_id().and_then(|did| did.module(db)) {
+        if item_module.def_map(db).block_id().is_some() && prefixed.is_some() {
+            mark::hit!(prefixed_in_block_expression);
+            prefixed = Some(PrefixKind::Plain);
         }
+    }
 
+    if let Some(prefix) = prefixed.map(PrefixKind::prefix) {
         best_path.or_else(|| {
             scope_name.map(|scope_name| ModPath::from_segments(prefix, vec![scope_name]))
         })
@@ -285,12 +292,12 @@ fn find_local_import_locations(
     let data = &def_map[from.local_id];
     let mut worklist =
         data.children.values().map(|child| def_map.module_id(*child)).collect::<Vec<_>>();
-    let mut parent = data.parent;
-    while let Some(p) = parent {
-        worklist.push(def_map.module_id(p));
-        parent = def_map[p].parent;
+    for ancestor in iter::successors(from.containing_module(db), |m| m.containing_module(db)) {
+        worklist.push(ancestor);
     }
 
+    let def_map = def_map.crate_root(db).def_map(db);
+
     let mut seen: FxHashSet<_> = FxHashSet::default();
 
     let mut locations = Vec::new();
@@ -301,7 +308,14 @@ fn find_local_import_locations(
 
         let ext_def_map;
         let data = if module.krate == from.krate {
-            &def_map[module.local_id]
+            if module.block.is_some() {
+                // Re-query the block's DefMap
+                ext_def_map = module.def_map(db);
+                &ext_def_map[module.local_id]
+            } else {
+                // Reuse the root DefMap
+                &def_map[module.local_id]
+            }
         } else {
             // The crate might reexport a module defined in another crate.
             ext_def_map = module.def_map(db);
@@ -828,6 +842,7 @@ mod tests {
 
     #[test]
     fn inner_items_from_inner_module() {
+        mark::check!(prefixed_in_block_expression);
         check_found_path(
             r#"
 fn main() {
@@ -847,26 +862,22 @@ mod tests {
     }
 
     #[test]
-    #[ignore]
-    fn inner_items_from_parent_module() {
-        // FIXME: ItemTree currently associates all inner items with `main`. Luckily, this sort of
-        // code is very rare, so this isn't terrible.
-        // To fix it, we should probably build dedicated `ItemTree`s for inner items, and not store
-        // them in the file's main ItemTree. This would also allow us to stop parsing function
-        // bodies when we only want to compute the crate's main DefMap.
+    fn outer_items_with_inner_items_present() {
         check_found_path(
             r#"
+mod module {
+    pub struct CompleteMe;
+}
+
 fn main() {
-    struct Struct {}
-    mod module {
-        $0
-    }
+    fn inner() {}
+    $0
 }
         "#,
-            "super::Struct",
-            "super::Struct",
-            "super::Struct",
-            "super::Struct",
-        );
+            "module::CompleteMe",
+            "module::CompleteMe",
+            "crate::module::CompleteMe",
+            "self::module::CompleteMe",
+        )
     }
 }
diff --git a/crates/hir_def/src/item_scope.rs b/crates/hir_def/src/item_scope.rs
index ee46c3330..4e5daa2ff 100644
--- a/crates/hir_def/src/item_scope.rs
+++ b/crates/hir_def/src/item_scope.rs
@@ -12,8 +12,8 @@ use stdx::format_to;
 use test_utils::mark;
 
 use crate::{
-    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, HasModule, ImplId,
-    LocalModuleId, Lookup, MacroDefId, ModuleDefId, ModuleId, TraitId,
+    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ImplId,
+    LocalModuleId, MacroDefId, ModuleDefId, ModuleId, TraitId,
 };
 
 #[derive(Copy, Clone)]
@@ -375,19 +375,9 @@ impl ItemInNs {
 
     /// Returns the crate defining this item (or `None` if `self` is built-in).
     pub fn krate(&self, db: &dyn DefDatabase) -> Option<CrateId> {
-        Some(match self {
-            ItemInNs::Types(did) | ItemInNs::Values(did) => match did {
-                ModuleDefId::ModuleId(id) => id.krate,
-                ModuleDefId::FunctionId(id) => id.lookup(db).module(db).krate,
-                ModuleDefId::AdtId(id) => id.module(db).krate,
-                ModuleDefId::EnumVariantId(id) => id.parent.lookup(db).container.module(db).krate,
-                ModuleDefId::ConstId(id) => id.lookup(db).container.module(db).krate,
-                ModuleDefId::StaticId(id) => id.lookup(db).container.module(db).krate,
-                ModuleDefId::TraitId(id) => id.lookup(db).container.module(db).krate,
-                ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db).krate,
-                ModuleDefId::BuiltinType(_) => return None,
-            },
-            ItemInNs::Macros(id) => return Some(id.krate),
-        })
+        match self {
+            ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate),
+            ItemInNs::Macros(id) => Some(id.krate),
+        }
     }
 }
diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs
index 6802bc250..4498d94bb 100644
--- a/crates/hir_def/src/lib.rs
+++ b/crates/hir_def/src/lib.rs
@@ -97,6 +97,10 @@ impl ModuleId {
     pub fn krate(&self) -> CrateId {
         self.krate
     }
+
+    pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+        self.def_map(db).containing_module(self.local_id)
+    }
 }
 
 /// An ID of a module, **local** to a specific crate
@@ -529,6 +533,25 @@ impl HasModule for StaticLoc {
     }
 }
 
+impl ModuleDefId {
+    /// Returns the module containing `self` (or `self`, if `self` is itself a module).
+    ///
+    /// Returns `None` if `self` refers to a primitive type.
+    pub fn module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+        Some(match self {
+            ModuleDefId::ModuleId(id) => *id,
+            ModuleDefId::FunctionId(id) => id.lookup(db).module(db),
+            ModuleDefId::AdtId(id) => id.module(db),
+            ModuleDefId::EnumVariantId(id) => id.parent.lookup(db).container.module(db),
+            ModuleDefId::ConstId(id) => id.lookup(db).container.module(db),
+            ModuleDefId::StaticId(id) => id.lookup(db).container.module(db),
+            ModuleDefId::TraitId(id) => id.lookup(db).container.module(db),
+            ModuleDefId::TypeAliasId(id) => id.lookup(db).module(db),
+            ModuleDefId::BuiltinType(_) => return None,
+        })
+    }
+}
+
 impl AttrDefId {
     pub fn krate(&self, db: &dyn db::DefDatabase) -> CrateId {
         match self {
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs
index 6a3456f2e..003d668ca 100644
--- a/crates/hir_def/src/nameres.rs
+++ b/crates/hir_def/src/nameres.rs
@@ -343,6 +343,18 @@ impl DefMap {
         Some(self.block?.parent)
     }
 
+    /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing
+    /// the block, if `self` corresponds to a block expression.
+    pub fn containing_module(&self, local_mod: LocalModuleId) -> Option<ModuleId> {
+        match &self[local_mod].parent {
+            Some(parent) => Some(self.module_id(*parent)),
+            None => match &self.block {
+                Some(block) => Some(block.parent),
+                None => None,
+            },
+        }
+    }
+
     // FIXME: this can use some more human-readable format (ideally, an IR
     // even), as this should be a great debugging aid.
     pub fn dump(&self, db: &dyn DefDatabase) -> String {
diff --git a/crates/hir_def/src/path/lower.rs b/crates/hir_def/src/path/lower.rs
index a469546c1..505493a74 100644
--- a/crates/hir_def/src/path/lower.rs
+++ b/crates/hir_def/src/path/lower.rs
@@ -101,8 +101,12 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
                 break;
             }
             ast::PathSegmentKind::SelfKw => {
-                kind = PathKind::Super(0);
-                break;
+                // don't break out if `self` is the last segment of a path, this mean we got an
+                // use tree like `foo::{self}` which we want to resolve as `foo`
+                if !segments.is_empty() {
+                    kind = PathKind::Super(0);
+                    break;
+                }
             }
             ast::PathSegmentKind::SuperKw => {
                 let nested_super_count = if let PathKind::Super(n) = kind { n } else { 0 };
@@ -117,6 +121,11 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
     segments.reverse();
     generic_args.reverse();
 
+    if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
+        // plain empty paths don't exist, this means we got a single `self` segment as our path
+        kind = PathKind::Super(0);
+    }
+
     // handle local_inner_macros :
     // Basically, even in rustc it is quite hacky:
     // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml
index 6ef9d1e7e..d1302d749 100644
--- a/crates/hir_ty/Cargo.toml
+++ b/crates/hir_ty/Cargo.toml
@@ -17,9 +17,9 @@ ena = "0.14.0"
 log = "0.4.8"
 rustc-hash = "1.1.0"
 scoped-tls = "1"
-chalk-solve = { version = "0.58", default-features = false }
-chalk-ir = "0.58"
-chalk-recursive = "0.58"
+chalk-solve = { version = "0.59", default-features = false }
+chalk-ir = "0.59"
+chalk-recursive = "0.59"
 la-arena = { version = "0.2.0", path = "../../lib/arena" }
 
 stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs
index 66a88e2b6..2751cd304 100644
--- a/crates/hir_ty/src/diagnostics/expr.rs
+++ b/crates/hir_ty/src/diagnostics/expr.rs
@@ -2,9 +2,7 @@
 
 use std::sync::Arc;
 
-use hir_def::{
-    expr::Statement, path::path, resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId,
-};
+use hir_def::{expr::Statement, path::path, resolver::HasResolver, AssocItemId, DefWithBodyId};
 use hir_expand::{diagnostics::DiagnosticSink, name};
 use rustc_hash::FxHashSet;
 use syntax::{ast, AstPtr};
@@ -17,7 +15,7 @@ use crate::{
         MissingPatFields, RemoveThisSemicolon,
     },
     utils::variant_data,
-    InferenceResult, Ty,
+    AdtId, InferenceResult, Ty,
 };
 
 pub(crate) use hir_def::{
@@ -382,10 +380,14 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         };
 
         let (params, required) = match mismatch.expected {
-            Ty::Adt(AdtId::EnumId(enum_id), ref parameters) if enum_id == core_result_enum => {
+            Ty::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ref parameters)
+                if enum_id == core_result_enum =>
+            {
                 (parameters, "Ok".to_string())
             }
-            Ty::Adt(AdtId::EnumId(enum_id), ref parameters) if enum_id == core_option_enum => {
+            Ty::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ref parameters)
+                if enum_id == core_option_enum =>
+            {
                 (parameters, "Some".to_string())
             }
             _ => return,
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index 86fee0050..04d39c571 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -222,12 +222,12 @@ use hir_def::{
     adt::VariantData,
     body::Body,
     expr::{Expr, Literal, Pat, PatId},
-    AdtId, EnumVariantId, StructId, VariantId,
+    EnumVariantId, StructId, VariantId,
 };
 use la_arena::Idx;
 use smallvec::{smallvec, SmallVec};
 
-use crate::{db::HirDatabase, InferenceResult, Ty};
+use crate::{db::HirDatabase, AdtId, InferenceResult, Ty};
 
 #[derive(Debug, Clone, Copy)]
 /// Either a pattern from the source code being analyzed, represented as
@@ -627,7 +627,7 @@ pub(super) fn is_useful(
     // - `!` type
     // In those cases, no match arm is useful.
     match cx.infer[cx.match_expr].strip_references() {
-        Ty::Adt(AdtId::EnumId(enum_id), ..) => {
+        Ty::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ..) => {
             if cx.db.enum_data(*enum_id).variants.is_empty() {
                 return Ok(Usefulness::NotUseful);
             }
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs
index f3a4333cb..a0882a2a1 100644
--- a/crates/hir_ty/src/display.rs
+++ b/crates/hir_ty/src/display.rs
@@ -2,18 +2,20 @@
 
 use std::{borrow::Cow, fmt};
 
-use crate::{
-    db::HirDatabase, primitive, utils::generics, AliasTy, CallableDefId, CallableSig,
-    GenericPredicate, Lifetime, Obligation, OpaqueTy, OpaqueTyId, ProjectionTy, Scalar, Substs,
-    TraitRef, Ty,
-};
 use arrayvec::ArrayVec;
+use chalk_ir::Mutability;
 use hir_def::{
-    db::DefDatabase, find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId,
+    db::DefDatabase, find_path, generics::TypeParamProvenance, item_scope::ItemInNs,
     AssocContainerId, HasModule, Lookup, ModuleId, TraitId,
 };
 use hir_expand::name::Name;
 
+use crate::{
+    db::HirDatabase, primitive, utils::generics, AdtId, AliasTy, CallableDefId, CallableSig,
+    GenericPredicate, Lifetime, Obligation, OpaqueTy, OpaqueTyId, ProjectionTy, Scalar, Substs,
+    TraitRef, Ty,
+};
+
 pub struct HirFormatter<'a> {
     pub db: &'a dyn HirDatabase,
     fmt: &'a mut dyn fmt::Write,
@@ -291,9 +293,23 @@ impl HirDisplay for Ty {
                     t.into_displayable(f.db, f.max_size, f.omit_verbose_types, f.display_target);
 
                 if matches!(self, Ty::Raw(..)) {
-                    write!(f, "*{}", m.as_keyword_for_ptr())?;
+                    write!(
+                        f,
+                        "*{}",
+                        match m {
+                            Mutability::Not => "const ",
+                            Mutability::Mut => "mut ",
+                        }
+                    )?;
                 } else {
-                    write!(f, "&{}", m.as_keyword_for_ref())?;
+                    write!(
+                        f,
+                        "&{}",
+                        match m {
+                            Mutability::Not => "",
+                            Mutability::Mut => "mut ",
+                        }
+                    )?;
                 }
 
                 let datas;
@@ -385,13 +401,13 @@ impl HirDisplay for Ty {
                         write!(f, " -> {}", ret_display)?;
                     }
                 }
-            Ty::Adt(def_id, parameters) => {
+            Ty::Adt(AdtId(def_id), parameters) => {
                 match f.display_target {
                     DisplayTarget::Diagnostics | DisplayTarget::Test => {
                         let name = match *def_id {
-                            AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
-                            AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
-                            AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
+                            hir_def::AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
+                            hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
+                            hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
                         };
                         write!(f, "{}", name)?;
                     }
diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs
index 18a4f5e8a..4d771a91e 100644
--- a/crates/hir_ty/src/infer.rs
+++ b/crates/hir_ty/src/infer.rs
@@ -18,6 +18,7 @@ use std::mem;
 use std::ops::Index;
 use std::sync::Arc;
 
+use chalk_ir::Mutability;
 use hir_def::{
     body::Body,
     data::{ConstData, FunctionData, StaticData},
@@ -25,7 +26,7 @@ use hir_def::{
     lang_item::LangItemTarget,
     path::{path, Path},
     resolver::{HasResolver, Resolver, TypeNs},
-    type_ref::{Mutability, TypeRef},
+    type_ref::TypeRef,
     AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId,
     TypeAliasId, VariantId,
 };
@@ -87,7 +88,7 @@ impl BindingMode {
     fn convert(annotation: BindingAnnotation) -> BindingMode {
         match annotation {
             BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
-            BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared),
+            BindingAnnotation::Ref => BindingMode::Ref(Mutability::Not),
             BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
         }
     }
diff --git a/crates/hir_ty/src/infer/coerce.rs b/crates/hir_ty/src/infer/coerce.rs
index c33d8c61e..cf0a3add4 100644
--- a/crates/hir_ty/src/infer/coerce.rs
+++ b/crates/hir_ty/src/infer/coerce.rs
@@ -4,8 +4,8 @@
 //!
 //! See: https://doc.rust-lang.org/nomicon/coercions.html
 
-use chalk_ir::TyVariableKind;
-use hir_def::{lang_item::LangItemTarget, type_ref::Mutability};
+use chalk_ir::{Mutability, TyVariableKind};
+use hir_def::lang_item::LangItemTarget;
 use test_utils::mark;
 
 use crate::{autoderef, traits::Solution, Obligation, Substs, TraitRef, Ty};
@@ -73,20 +73,20 @@ impl<'a> InferenceContext<'a> {
         match (&mut from_ty, to_ty) {
             // `*mut T` -> `*const T`
             // `&mut T` -> `&T`
-            (Ty::Raw(m1, ..), Ty::Raw(m2 @ Mutability::Shared, ..))
-            | (Ty::Ref(m1, ..), Ty::Ref(m2 @ Mutability::Shared, ..)) => {
+            (Ty::Raw(m1, ..), Ty::Raw(m2 @ Mutability::Not, ..))
+            | (Ty::Ref(m1, ..), Ty::Ref(m2 @ Mutability::Not, ..)) => {
                 *m1 = *m2;
             }
             // `&T` -> `*const T`
             // `&mut T` -> `*mut T`/`*const T`
-            (Ty::Ref(.., substs), &Ty::Raw(m2 @ Mutability::Shared, ..))
+            (Ty::Ref(.., substs), &Ty::Raw(m2 @ Mutability::Not, ..))
             | (Ty::Ref(Mutability::Mut, substs), &Ty::Raw(m2, ..)) => {
                 from_ty = Ty::Raw(m2, substs.clone());
             }
 
             // Illegal mutability conversion
-            (Ty::Raw(Mutability::Shared, ..), Ty::Raw(Mutability::Mut, ..))
-            | (Ty::Ref(Mutability::Shared, ..), Ty::Ref(Mutability::Mut, ..)) => return false,
+            (Ty::Raw(Mutability::Not, ..), Ty::Raw(Mutability::Mut, ..))
+            | (Ty::Ref(Mutability::Not, ..), Ty::Ref(Mutability::Mut, ..)) => return false,
 
             // `{function_type}` -> `fn()`
             (Ty::FnDef(..), Ty::Function { .. }) => match from_ty.callable_sig(self.db) {
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index 7852b3d23..ec2c13154 100644
--- a/crates/hir_ty/src/infer/expr.rs
+++ b/crates/hir_ty/src/infer/expr.rs
@@ -3,23 +3,25 @@
 use std::iter::{repeat, repeat_with};
 use std::{mem, sync::Arc};
 
-use chalk_ir::TyVariableKind;
+use chalk_ir::{Mutability, TyVariableKind};
 use hir_def::{
     expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
     path::{GenericArg, GenericArgs},
     resolver::resolver_for_expr,
-    AdtId, AssocContainerId, FieldId, Lookup,
+    AssocContainerId, FieldId, Lookup,
 };
 use hir_expand::name::{name, Name};
 use syntax::ast::RangeOp;
 use test_utils::mark;
 
 use crate::{
-    autoderef, method_resolution, op,
+    autoderef,
+    lower::lower_to_chalk_mutability,
+    method_resolution, op,
     primitive::{self, UintTy},
     traits::{FnTrait, InEnvironment},
     utils::{generics, variant_data, Generics},
-    Binders, CallableDefId, FnPointer, FnSig, Mutability, Obligation, OpaqueTyId, Rawness, Scalar,
+    AdtId, Binders, CallableDefId, FnPointer, FnSig, Obligation, OpaqueTyId, Rawness, Scalar,
     Substs, TraitRef, Ty,
 };
 
@@ -427,14 +429,14 @@ impl<'a> InferenceContext<'a> {
             Ty::Tuple(_, substs) => {
                 name.as_tuple_index().and_then(|idx| substs.0.get(idx).cloned())
             }
-            Ty::Adt(AdtId::StructId(s), parameters) => {
+            Ty::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
                 self.db.struct_data(s).variant_data.field(name).map(|local_id| {
                     let field = FieldId { parent: s.into(), local_id };
                     self.write_field_resolution(tgt_expr, field);
                     self.db.field_types(s.into())[field.local_id].clone().subst(&parameters)
                 })
             }
-            Ty::Adt(AdtId::UnionId(u), parameters) => {
+            Ty::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
                 self.db.union_data(u).variant_data.field(name).map(|local_id| {
                     let field = FieldId { parent: u.into(), local_id };
                     self.write_field_resolution(tgt_expr, field);
@@ -462,10 +464,11 @@ impl<'a> InferenceContext<'a> {
                 cast_ty
             }
             Expr::Ref { expr, rawness, mutability } => {
+                let mutability = lower_to_chalk_mutability(*mutability);
                 let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) =
                     &expected.ty.as_reference_or_ptr()
                 {
-                    if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared {
+                    if *exp_mutability == Mutability::Mut && mutability == Mutability::Not {
                         // FIXME: throw type error - expected mut reference but found shared ref,
                         // which cannot be coerced
                     }
@@ -479,8 +482,8 @@ impl<'a> InferenceContext<'a> {
                 };
                 let inner_ty = self.infer_expr_inner(*expr, &expectation);
                 match rawness {
-                    Rawness::RawPtr => Ty::Raw(*mutability, Substs::single(inner_ty)),
-                    Rawness::Ref => Ty::Ref(*mutability, Substs::single(inner_ty)),
+                    Rawness::RawPtr => Ty::Raw(mutability, Substs::single(inner_ty)),
+                    Rawness::Ref => Ty::Ref(mutability, Substs::single(inner_ty)),
                 }
             }
             Expr::Box { expr } => {
@@ -495,7 +498,7 @@ impl<'a> InferenceContext<'a> {
                     _ => (),
                 }
                 sb = sb.fill(repeat_with(|| self.table.new_type_var()));
-                Ty::Adt(box_, sb.build())
+                Ty::adt_ty(box_, sb.build())
             } else {
                 Ty::Unknown
             }
@@ -583,31 +586,31 @@ impl<'a> InferenceContext<'a> {
                 let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
                 match (range_type, lhs_ty, rhs_ty) {
                     (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
-                        Some(adt) => Ty::Adt(adt, Substs::empty()),
+                        Some(adt) => Ty::adt_ty(adt, Substs::empty()),
                         None => Ty::Unknown,
                     },
                     (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
-                        Some(adt) => Ty::Adt(adt, Substs::single(ty)),
+                        Some(adt) => Ty::adt_ty(adt, Substs::single(ty)),
                         None => Ty::Unknown,
                     },
                     (RangeOp::Inclusive, None, Some(ty)) => {
                         match self.resolve_range_to_inclusive() {
-                            Some(adt) => Ty::Adt(adt, Substs::single(ty)),
+                            Some(adt) => Ty::adt_ty(adt, Substs::single(ty)),
                             None => Ty::Unknown,
                         }
                     }
                     (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
-                        Some(adt) => Ty::Adt(adt, Substs::single(ty)),
+                        Some(adt) => Ty::adt_ty(adt, Substs::single(ty)),
                         None => Ty::Unknown,
                     },
                     (RangeOp::Inclusive, Some(_), Some(ty)) => {
                         match self.resolve_range_inclusive() {
-                            Some(adt) => Ty::Adt(adt, Substs::single(ty)),
+                            Some(adt) => Ty::adt_ty(adt, Substs::single(ty)),
                             None => Ty::Unknown,
                         }
                     }
                     (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
-                        Some(adt) => Ty::Adt(adt, Substs::single(ty)),
+                        Some(adt) => Ty::adt_ty(adt, Substs::single(ty)),
                         None => Ty::Unknown,
                     },
                     (RangeOp::Inclusive, _, None) => Ty::Unknown,
@@ -684,11 +687,11 @@ impl<'a> InferenceContext<'a> {
             }
             Expr::Literal(lit) => match lit {
                 Literal::Bool(..) => Ty::Scalar(Scalar::Bool),
-                Literal::String(..) => Ty::Ref(Mutability::Shared, Substs::single(Ty::Str)),
+                Literal::String(..) => Ty::Ref(Mutability::Not, Substs::single(Ty::Str)),
                 Literal::ByteString(..) => {
                     let byte_type = Ty::Scalar(Scalar::Uint(UintTy::U8));
                     let array_type = Ty::Array(Substs::single(byte_type));
-                    Ty::Ref(Mutability::Shared, Substs::single(array_type))
+                    Ty::Ref(Mutability::Not, Substs::single(array_type))
                 }
                 Literal::Char(..) => Ty::Scalar(Scalar::Char),
                 Literal::Int(_v, ty) => match ty {
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs
index a318e47f3..987793e2e 100644
--- a/crates/hir_ty/src/infer/pat.rs
+++ b/crates/hir_ty/src/infer/pat.rs
@@ -3,17 +3,17 @@
 use std::iter::repeat;
 use std::sync::Arc;
 
+use chalk_ir::Mutability;
 use hir_def::{
     expr::{BindingAnnotation, Expr, Literal, Pat, PatId, RecordFieldPat},
     path::Path,
-    type_ref::Mutability,
     FieldId,
 };
 use hir_expand::name::Name;
 use test_utils::mark;
 
 use super::{BindingMode, Expectation, InferenceContext};
-use crate::{utils::variant_data, Substs, Ty};
+use crate::{lower::lower_to_chalk_mutability, utils::variant_data, Substs, Ty};
 
 impl<'a> InferenceContext<'a> {
     fn infer_tuple_struct_pat(
@@ -103,7 +103,7 @@ impl<'a> InferenceContext<'a> {
             expected = inner;
             default_bm = match default_bm {
                 BindingMode::Move => BindingMode::Ref(mutability),
-                BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared),
+                BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not),
                 BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
             }
         }
@@ -152,9 +152,10 @@ impl<'a> InferenceContext<'a> {
                 }
             }
             Pat::Ref { pat, mutability } => {
+                let mutability = lower_to_chalk_mutability(*mutability);
                 let expectation = match expected.as_reference() {
                     Some((inner_ty, exp_mut)) => {
-                        if *mutability != exp_mut {
+                        if mutability != exp_mut {
                             // FIXME: emit type error?
                         }
                         inner_ty
@@ -162,7 +163,7 @@ impl<'a> InferenceContext<'a> {
                     _ => &Ty::Unknown,
                 };
                 let subty = self.infer_pat(*pat, expectation, default_bm);
-                Ty::Ref(*mutability, Substs::single(subty))
+                Ty::Ref(mutability, Substs::single(subty))
             }
             Pat::TupleStruct { path: p, args: subpats, ellipsis } => self.infer_tuple_struct_pat(
                 p.as_ref(),
@@ -236,7 +237,7 @@ impl<'a> InferenceContext<'a> {
                 };
 
                 let inner_ty = self.infer_pat(*inner, inner_expected, default_bm);
-                Ty::Adt(box_adt, Substs::single(inner_ty))
+                Ty::adt_ty(box_adt, Substs::single(inner_ty))
             }
             None => Ty::Unknown,
         },
diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs
index 9bcaf6fa7..e77f24e4e 100644
--- a/crates/hir_ty/src/lib.rs
+++ b/crates/hir_ty/src/lib.rs
@@ -27,11 +27,9 @@ use std::{iter, mem, ops::Deref, sync::Arc};
 
 use base_db::salsa;
 use hir_def::{
-    builtin_type::BuiltinType,
-    expr::ExprId,
-    type_ref::{Mutability, Rawness},
-    AdtId, AssocContainerId, DefWithBodyId, FunctionId, GenericDefId, HasModule, LifetimeParamId,
-    Lookup, TraitId, TypeAliasId, TypeParamId,
+    builtin_type::BuiltinType, expr::ExprId, type_ref::Rawness, AssocContainerId, DefWithBodyId,
+    FunctionId, GenericDefId, HasModule, LifetimeParamId, Lookup, TraitId, TypeAliasId,
+    TypeParamId,
 };
 use itertools::Itertools;
 
@@ -49,7 +47,9 @@ pub use lower::{
 };
 pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};
 
-pub use chalk_ir::{BoundVar, DebruijnIndex, Scalar, TyVariableKind};
+pub use chalk_ir::{AdtId, BoundVar, DebruijnIndex, Mutability, Scalar, TyVariableKind};
+
+pub(crate) use crate::traits::chalk::Interner;
 
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
 pub enum Lifetime {
@@ -133,7 +133,7 @@ pub enum AliasTy {
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
 pub enum Ty {
     /// Structures, enumerations and unions.
-    Adt(AdtId, Substs),
+    Adt(AdtId<Interner>, Substs),
 
     /// Represents an associated item like `Iterator::Item`. This is used
     /// when we have tried to normalize a projection like `T::Item` but
@@ -604,6 +604,10 @@ impl Ty {
         Ty::Tuple(0, Substs::empty())
     }
 
+    pub fn adt_ty(adt: hir_def::AdtId, substs: Substs) -> Ty {
+        Ty::Adt(AdtId(adt), substs)
+    }
+
     pub fn fn_ptr(sig: CallableSig) -> Self {
         Ty::Function(FnPointer {
             num_args: sig.params().len(),
@@ -652,9 +656,9 @@ impl Ty {
         t
     }
 
-    pub fn as_adt(&self) -> Option<(AdtId, &Substs)> {
+    pub fn as_adt(&self) -> Option<(hir_def::AdtId, &Substs)> {
         match self {
-            Ty::Adt(adt_def, parameters) => Some((*adt_def, parameters)),
+            Ty::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
             _ => None,
         }
     }
@@ -668,7 +672,7 @@ impl Ty {
 
     pub fn as_generic_def(&self) -> Option<GenericDefId> {
         match *self {
-            Ty::Adt(adt, ..) => Some(adt.into()),
+            Ty::Adt(AdtId(adt), ..) => Some(adt.into()),
             Ty::FnDef(callable, ..) => Some(callable.into()),
             Ty::AssociatedType(type_alias, ..) => Some(type_alias.into()),
             Ty::ForeignType(type_alias, ..) => Some(type_alias.into()),
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index ca06c9fe2..5fe5b8ad1 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -8,6 +8,7 @@
 use std::{iter, sync::Arc};
 
 use base_db::CrateId;
+use chalk_ir::Mutability;
 use hir_def::{
     adt::StructKind,
     builtin_type::BuiltinType,
@@ -157,7 +158,7 @@ impl Ty {
             }
             TypeRef::RawPtr(inner, mutability) => {
                 let inner_ty = Ty::from_hir(ctx, inner);
-                Ty::Raw(*mutability, Substs::single(inner_ty))
+                Ty::Raw(lower_to_chalk_mutability(*mutability), Substs::single(inner_ty))
             }
             TypeRef::Array(inner) => {
                 let inner_ty = Ty::from_hir(ctx, inner);
@@ -169,7 +170,7 @@ impl Ty {
             }
             TypeRef::Reference(inner, _, mutability) => {
                 let inner_ty = Ty::from_hir(ctx, inner);
-                Ty::Ref(*mutability, Substs::single(inner_ty))
+                Ty::Ref(lower_to_chalk_mutability(*mutability), Substs::single(inner_ty))
             }
             TypeRef::Placeholder => Ty::Unknown,
             TypeRef::Fn(params, is_varargs) => {
@@ -1099,7 +1100,7 @@ fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -
 fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
     let generics = generics(db.upcast(), adt.into());
     let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
-    Binders::new(substs.len(), Ty::Adt(adt, substs))
+    Binders::new(substs.len(), Ty::adt_ty(adt, substs))
 }
 
 fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
@@ -1259,3 +1260,10 @@ pub(crate) fn return_type_impl_traits(
         Some(Arc::new(Binders::new(num_binders, return_type_impl_traits)))
     }
 }
+
+pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
+    match m {
+        hir_def::type_ref::Mutability::Shared => Mutability::Not,
+        hir_def::type_ref::Mutability::Mut => Mutability::Mut,
+    }
+}
diff --git a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs
index dd5109d4e..dfcf346fb 100644
--- a/crates/hir_ty/src/method_resolution.rs
+++ b/crates/hir_ty/src/method_resolution.rs
@@ -6,9 +6,10 @@ use std::{iter, sync::Arc};
 
 use arrayvec::ArrayVec;
 use base_db::CrateId;
+use chalk_ir::Mutability;
 use hir_def::{
-    lang_item::LangItemTarget, type_ref::Mutability, AdtId, AssocContainerId, AssocItemId,
-    FunctionId, GenericDefId, HasModule, ImplId, Lookup, ModuleId, TraitId, TypeAliasId,
+    lang_item::LangItemTarget, AssocContainerId, AssocItemId, FunctionId, GenericDefId, HasModule,
+    ImplId, Lookup, ModuleId, TraitId, TypeAliasId,
 };
 use hir_expand::name::Name;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -18,8 +19,8 @@ use crate::{
     db::HirDatabase,
     primitive::{self, FloatTy, IntTy, UintTy},
     utils::all_super_traits,
-    Canonical, DebruijnIndex, FnPointer, FnSig, InEnvironment, Scalar, Substs, TraitEnvironment,
-    TraitRef, Ty, TypeWalk,
+    AdtId, Canonical, DebruijnIndex, FnPointer, FnSig, InEnvironment, Scalar, Substs,
+    TraitEnvironment, TraitRef, Ty, TypeWalk,
 };
 
 /// This is used as a key for indexing impls.
@@ -31,7 +32,7 @@ pub enum TyFingerprint {
     Never,
     RawPtr(Mutability),
     Scalar(Scalar),
-    Adt(AdtId),
+    Adt(hir_def::AdtId),
     Dyn(TraitId),
     Tuple(usize),
     ForeignType(TypeAliasId),
@@ -49,7 +50,7 @@ impl TyFingerprint {
             &Ty::Slice(..) => TyFingerprint::Slice,
             &Ty::Array(..) => TyFingerprint::Array,
             &Ty::Scalar(scalar) => TyFingerprint::Scalar(scalar),
-            &Ty::Adt(adt, _) => TyFingerprint::Adt(adt),
+            &Ty::Adt(AdtId(adt), _) => TyFingerprint::Adt(adt),
             &Ty::Tuple(cardinality, _) => TyFingerprint::Tuple(cardinality),
             &Ty::Raw(mutability, ..) => TyFingerprint::RawPtr(mutability),
             &Ty::ForeignType(alias_id, ..) => TyFingerprint::ForeignType(alias_id),
@@ -230,7 +231,7 @@ impl Ty {
         let mod_to_crate_ids = |module: ModuleId| Some(std::iter::once(module.krate()).collect());
 
         let lang_item_targets = match self {
-            Ty::Adt(def_id, _) => {
+            Ty::Adt(AdtId(def_id), _) => {
                 return mod_to_crate_ids(def_id.module(db.upcast()));
             }
             Ty::ForeignType(type_alias_id) => {
@@ -251,7 +252,7 @@ impl Ty {
             }
             Ty::Str => lang_item_crate!("str_alloc", "str"),
             Ty::Slice(_) => lang_item_crate!("slice_alloc", "slice"),
-            Ty::Raw(Mutability::Shared, _) => lang_item_crate!("const_ptr"),
+            Ty::Raw(Mutability::Not, _) => lang_item_crate!("const_ptr"),
             Ty::Raw(Mutability::Mut, _) => lang_item_crate!("mut_ptr"),
             Ty::Dyn(_) => {
                 return self.dyn_trait().and_then(|trait_| {
@@ -429,7 +430,7 @@ fn iterate_method_candidates_with_autoref(
429 } 430 }
430 let refed = Canonical { 431 let refed = Canonical {
431 kinds: deref_chain[0].kinds.clone(), 432 kinds: deref_chain[0].kinds.clone(),
432 value: Ty::Ref(Mutability::Shared, Substs::single(deref_chain[0].value.clone())), 433 value: Ty::Ref(Mutability::Not, Substs::single(deref_chain[0].value.clone())),
433 }; 434 };
434 if iterate_method_candidates_by_receiver( 435 if iterate_method_candidates_by_receiver(
435 &refed, 436 &refed,
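The hunk above swaps hir_def's Mutability (Shared/Mut) for chalk_ir::Mutability (Not/Mut) and keeps the hir_def::AdtId behind chalk's AdtId newtype, so match arms now peel the wrapper inside the pattern itself, as in `Ty::Adt(AdtId(adt), _)`. A minimal standalone sketch of that newtype-destructuring pattern follows; the types are hypothetical stand-ins, not rust-analyzer's.

// Sketch only: peel a newtype wrapper directly in a match arm.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct DefId(u32); // hypothetical stand-in for hir_def::AdtId

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct AdtId(DefId); // newtype wrapper, like chalk_ir::AdtId

enum Ty {
    Adt(AdtId),
    Str,
}

#[derive(Debug, PartialEq)]
enum TyFingerprint {
    Adt(DefId), // the fingerprint stores the inner id, so the wrapper is removed here
    Str,
}

fn fingerprint(ty: &Ty) -> TyFingerprint {
    match ty {
        // pattern-match through the newtype to bind the inner id in one step
        Ty::Adt(AdtId(def)) => TyFingerprint::Adt(*def),
        Ty::Str => TyFingerprint::Str,
    }
}

fn main() {
    assert_eq!(fingerprint(&Ty::Adt(AdtId(DefId(7)))), TyFingerprint::Adt(DefId(7)));
    assert_eq!(fingerprint(&Ty::Str), TyFingerprint::Str);
}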
diff --git a/crates/hir_ty/src/traits/chalk.rs b/crates/hir_ty/src/traits/chalk.rs
index e513fa8f4..4378a9723 100644
--- a/crates/hir_ty/src/traits/chalk.rs
+++ b/crates/hir_ty/src/traits/chalk.rs
@@ -315,9 +315,8 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
315 let id = from_chalk(self.db, trait_id); 315 let id = from_chalk(self.db, trait_id);
316 self.db.trait_data(id).name.to_string() 316 self.db.trait_data(id).name.to_string()
317 } 317 }
318 fn adt_name(&self, adt_id: chalk_ir::AdtId<Interner>) -> String { 318 fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
319 let id = from_chalk(self.db, adt_id); 319 match adt_id {
320 match id {
321 hir_def::AdtId::StructId(id) => self.db.struct_data(id).name.to_string(), 320 hir_def::AdtId::StructId(id) => self.db.struct_data(id).name.to_string(),
322 hir_def::AdtId::EnumId(id) => self.db.enum_data(id).name.to_string(), 321 hir_def::AdtId::EnumId(id) => self.db.enum_data(id).name.to_string(),
323 hir_def::AdtId::UnionId(id) => self.db.union_data(id).name.to_string(), 322 hir_def::AdtId::UnionId(id) => self.db.union_data(id).name.to_string(),
@@ -488,8 +487,8 @@ pub(crate) fn struct_datum_query(
488 struct_id: AdtId, 487 struct_id: AdtId,
489) -> Arc<StructDatum> { 488) -> Arc<StructDatum> {
490 debug!("struct_datum {:?}", struct_id); 489 debug!("struct_datum {:?}", struct_id);
491 let adt_id = from_chalk(db, struct_id); 490 let type_ctor = Ty::Adt(struct_id, Substs::empty());
492 let type_ctor = Ty::Adt(adt_id, Substs::empty()); 491 let chalk_ir::AdtId(adt_id) = struct_id;
493 debug!("struct {:?} = {:?}", struct_id, type_ctor); 492 debug!("struct {:?} = {:?}", struct_id, type_ctor);
494 let num_params = generics(db.upcast(), adt_id.into()).len(); 493 let num_params = generics(db.upcast(), adt_id.into()).len();
495 let upstream = adt_id.module(db.upcast()).krate() != krate; 494 let upstream = adt_id.module(db.upcast()).krate() != krate;
@@ -684,10 +683,9 @@ pub(crate) fn fn_def_variance_query(
684pub(crate) fn adt_variance_query( 683pub(crate) fn adt_variance_query(
685 db: &dyn HirDatabase, 684 db: &dyn HirDatabase,
686 _krate: CrateId, 685 _krate: CrateId,
687 adt_id: AdtId, 686 chalk_ir::AdtId(adt_id): AdtId,
688) -> Variances { 687) -> Variances {
689 let adt: crate::AdtId = from_chalk(db, adt_id); 688 let generic_params = generics(db.upcast(), adt_id.into());
690 let generic_params = generics(db.upcast(), adt.into());
691 Variances::from_iter( 689 Variances::from_iter(
692 &Interner, 690 &Interner,
693 std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()), 691 std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
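adt_name and adt_variance_query above unwrap the chalk newtype right in the parameter list (`chalk_ir::AdtId(adt_id): AdtId`), which Rust allows because function parameters accept irrefutable patterns. A small self-contained sketch of that signature-level destructuring, with a hypothetical AdtId type:

// Sketch only: bind the inner value of a newtype in the fn signature.
struct AdtId(u32);

// the parameter pattern unwraps the newtype; no extra `let` is needed in the body
fn describe(AdtId(raw): AdtId) -> String {
    format!("adt #{}", raw)
}

fn main() {
    assert_eq!(describe(AdtId(3)), "adt #3");
}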
diff --git a/crates/hir_ty/src/traits/chalk/mapping.rs b/crates/hir_ty/src/traits/chalk/mapping.rs
index 6e6055d80..3a08b67e9 100644
--- a/crates/hir_ty/src/traits/chalk/mapping.rs
+++ b/crates/hir_ty/src/traits/chalk/mapping.rs
@@ -10,7 +10,7 @@ use chalk_ir::{
10use chalk_solve::rust_ir; 10use chalk_solve::rust_ir;
11 11
12use base_db::salsa::InternKey; 12use base_db::salsa::InternKey;
13use hir_def::{type_ref::Mutability, AssocContainerId, GenericDefId, Lookup, TypeAliasId}; 13use hir_def::{AssocContainerId, GenericDefId, Lookup, TypeAliasId};
14 14
15use crate::{ 15use crate::{
16 db::HirDatabase, 16 db::HirDatabase,
@@ -65,7 +65,7 @@ impl ToChalk for Ty {
65 } 65 }
66 Ty::Raw(mutability, substs) => { 66 Ty::Raw(mutability, substs) => {
67 let ty = substs[0].clone().to_chalk(db); 67 let ty = substs[0].clone().to_chalk(db);
68 chalk_ir::TyKind::Raw(mutability.to_chalk(db), ty).intern(&Interner) 68 chalk_ir::TyKind::Raw(mutability, ty).intern(&Interner)
69 } 69 }
70 Ty::Slice(substs) => { 70 Ty::Slice(substs) => {
71 chalk_ir::TyKind::Slice(substs[0].clone().to_chalk(db)).intern(&Interner) 71 chalk_ir::TyKind::Slice(substs[0].clone().to_chalk(db)).intern(&Interner)
@@ -86,7 +86,7 @@ impl ToChalk for Ty {
86 86
87 Ty::Adt(adt_id, substs) => { 87 Ty::Adt(adt_id, substs) => {
88 let substitution = substs.to_chalk(db); 88 let substitution = substs.to_chalk(db);
89 chalk_ir::TyKind::Adt(chalk_ir::AdtId(adt_id), substitution).intern(&Interner) 89 chalk_ir::TyKind::Adt(adt_id, substitution).intern(&Interner)
90 } 90 }
91 Ty::Alias(AliasTy::Projection(proj_ty)) => { 91 Ty::Alias(AliasTy::Projection(proj_ty)) => {
92 let associated_ty_id = TypeAliasAsAssocType(proj_ty.associated_ty).to_chalk(db); 92 let associated_ty_id = TypeAliasAsAssocType(proj_ty.associated_ty).to_chalk(db);
@@ -183,7 +183,7 @@ impl ToChalk for Ty {
183 Ty::Dyn(predicates) 183 Ty::Dyn(predicates)
184 } 184 }
185 185
186 chalk_ir::TyKind::Adt(struct_id, subst) => Ty::Adt(struct_id.0, from_chalk(db, subst)), 186 chalk_ir::TyKind::Adt(adt_id, subst) => Ty::Adt(adt_id, from_chalk(db, subst)),
187 chalk_ir::TyKind::AssociatedType(type_id, subst) => Ty::AssociatedType( 187 chalk_ir::TyKind::AssociatedType(type_id, subst) => Ty::AssociatedType(
188 from_chalk::<TypeAliasAsAssocType, _>(db, type_id).0, 188 from_chalk::<TypeAliasAsAssocType, _>(db, type_id).0,
189 from_chalk(db, subst), 189 from_chalk(db, subst),
@@ -198,11 +198,11 @@ impl ToChalk for Ty {
198 Ty::Tuple(cardinality, from_chalk(db, subst)) 198 Ty::Tuple(cardinality, from_chalk(db, subst))
199 } 199 }
200 chalk_ir::TyKind::Raw(mutability, ty) => { 200 chalk_ir::TyKind::Raw(mutability, ty) => {
201 Ty::Raw(from_chalk(db, mutability), Substs::single(from_chalk(db, ty))) 201 Ty::Raw(mutability, Substs::single(from_chalk(db, ty)))
202 } 202 }
203 chalk_ir::TyKind::Slice(ty) => Ty::Slice(Substs::single(from_chalk(db, ty))), 203 chalk_ir::TyKind::Slice(ty) => Ty::Slice(Substs::single(from_chalk(db, ty))),
204 chalk_ir::TyKind::Ref(mutability, _lifetime, ty) => { 204 chalk_ir::TyKind::Ref(mutability, _lifetime, ty) => {
205 Ty::Ref(from_chalk(db, mutability), Substs::single(from_chalk(db, ty))) 205 Ty::Ref(mutability, Substs::single(from_chalk(db, ty)))
206 } 206 }
207 chalk_ir::TyKind::Str => Ty::Str, 207 chalk_ir::TyKind::Str => Ty::Str,
208 chalk_ir::TyKind::Never => Ty::Never, 208 chalk_ir::TyKind::Never => Ty::Never,
@@ -230,12 +230,12 @@ impl ToChalk for Ty {
230/// fake lifetime here, because Chalk's built-in logic may expect it to be there. 230
231fn ref_to_chalk( 231fn ref_to_chalk(
232 db: &dyn HirDatabase, 232 db: &dyn HirDatabase,
233 mutability: Mutability, 233 mutability: chalk_ir::Mutability,
234 subst: Substs, 234 subst: Substs,
235) -> chalk_ir::Ty<Interner> { 235) -> chalk_ir::Ty<Interner> {
236 let arg = subst[0].clone().to_chalk(db); 236 let arg = subst[0].clone().to_chalk(db);
237 let lifetime = LifetimeData::Static.intern(&Interner); 237 let lifetime = LifetimeData::Static.intern(&Interner);
238 chalk_ir::TyKind::Ref(mutability.to_chalk(db), lifetime, arg).intern(&Interner) 238 chalk_ir::TyKind::Ref(mutability, lifetime, arg).intern(&Interner)
239} 239}
240 240
241/// We currently don't model constants, but Chalk does. So, we have to insert a 241/// We currently don't model constants, but Chalk does. So, we have to insert a
@@ -313,22 +313,6 @@ impl ToChalk for OpaqueTyId {
313 } 313 }
314} 314}
315 315
316impl ToChalk for Mutability {
317 type Chalk = chalk_ir::Mutability;
318 fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk {
319 match self {
320 Mutability::Shared => chalk_ir::Mutability::Not,
321 Mutability::Mut => chalk_ir::Mutability::Mut,
322 }
323 }
324 fn from_chalk(_db: &dyn HirDatabase, chalk: Self::Chalk) -> Self {
325 match chalk {
326 chalk_ir::Mutability::Mut => Mutability::Mut,
327 chalk_ir::Mutability::Not => Mutability::Shared,
328 }
329 }
330}
331
332impl ToChalk for hir_def::ImplId { 316impl ToChalk for hir_def::ImplId {
333 type Chalk = ImplId; 317 type Chalk = ImplId;
334 318
@@ -341,18 +325,6 @@ impl ToChalk for hir_def::ImplId {
341 } 325 }
342} 326}
343 327
344impl ToChalk for hir_def::AdtId {
345 type Chalk = AdtId;
346
347 fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk {
348 chalk_ir::AdtId(self.into())
349 }
350
351 fn from_chalk(_db: &dyn HirDatabase, id: AdtId) -> Self {
352 id.0
353 }
354}
355
356impl ToChalk for CallableDefId { 328impl ToChalk for CallableDefId {
357 type Chalk = FnDefId; 329 type Chalk = FnDefId;
358 330
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 45170dd29..4b1b24562 100644
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -6,7 +6,7 @@ use syntax::{
6 ast::{self, AstNode, AstToken, VisibilityOwner}, 6 ast::{self, AstNode, AstToken, VisibilityOwner},
7 Direction, NodeOrToken, SourceFile, 7 Direction, NodeOrToken, SourceFile,
8 SyntaxKind::{self, *}, 8 SyntaxKind::{self, *},
9 SyntaxNode, TextRange, 9 SyntaxNode, TextRange, TextSize,
10}; 10};
11 11
12#[derive(Debug, PartialEq, Eq)] 12#[derive(Debug, PartialEq, Eq)]
@@ -16,6 +16,7 @@ pub enum FoldKind {
16 Mods, 16 Mods,
17 Block, 17 Block,
18 ArgList, 18 ArgList,
19 Region,
19} 20}
20 21
21#[derive(Debug)] 22#[derive(Debug)]
@@ -29,6 +30,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
29 let mut visited_comments = FxHashSet::default(); 30 let mut visited_comments = FxHashSet::default();
30 let mut visited_imports = FxHashSet::default(); 31 let mut visited_imports = FxHashSet::default();
31 let mut visited_mods = FxHashSet::default(); 32 let mut visited_mods = FxHashSet::default();
33     // regions can be nested, so keep a LIFO stack of region start offsets
34 let mut regions_starts: Vec<TextSize> = vec![];
32 35
33 for element in file.syntax().descendants_with_tokens() { 36 for element in file.syntax().descendants_with_tokens() {
34 // Fold items that span multiple lines 37 // Fold items that span multiple lines
@@ -48,10 +51,25 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
48 // Fold groups of comments 51 // Fold groups of comments
49 if let Some(comment) = ast::Comment::cast(token) { 52 if let Some(comment) = ast::Comment::cast(token) {
50 if !visited_comments.contains(&comment) { 53 if !visited_comments.contains(&comment) {
51 if let Some(range) = 54 // regions are not real comments
52 contiguous_range_for_comment(comment, &mut visited_comments) 55 if comment.text().trim().starts_with("// region:") {
53 { 56 regions_starts.push(comment.syntax().text_range().start());
54 res.push(Fold { range, kind: FoldKind::Comment }) 57 } else if comment.text().trim().starts_with("// endregion") {
58 if let Some(region) = regions_starts.pop() {
59 res.push(Fold {
60 range: TextRange::new(
61 region,
62 comment.syntax().text_range().end(),
63 ),
64 kind: FoldKind::Region,
65 })
66 }
67 } else {
68 if let Some(range) =
69 contiguous_range_for_comment(comment, &mut visited_comments)
70 {
71 res.push(Fold { range, kind: FoldKind::Comment })
72 }
55 } 73 }
56 } 74 }
57 } 75 }
@@ -175,9 +193,16 @@ fn contiguous_range_for_comment(
175 } 193 }
176 if let Some(c) = ast::Comment::cast(token) { 194 if let Some(c) = ast::Comment::cast(token) {
177 if c.kind() == group_kind { 195 if c.kind() == group_kind {
178 visited.insert(c.clone()); 196 // regions are not real comments
179 last = c; 197 if c.text().trim().starts_with("// region:")
180 continue; 198 || c.text().trim().starts_with("// endregion")
199 {
200 break;
201 } else {
202 visited.insert(c.clone());
203 last = c;
204 continue;
205 }
181 } 206 }
182 } 207 }
183 // The comment group ends because either: 208 // The comment group ends because either:
@@ -224,6 +249,7 @@ mod tests {
224 FoldKind::Mods => "mods", 249 FoldKind::Mods => "mods",
225 FoldKind::Block => "block", 250 FoldKind::Block => "block",
226 FoldKind::ArgList => "arglist", 251 FoldKind::ArgList => "arglist",
252 FoldKind::Region => "region",
227 }; 253 };
228 assert_eq!(kind, &attr.unwrap()); 254 assert_eq!(kind, &attr.unwrap());
229 } 255 }
@@ -418,4 +444,17 @@ fn foo<fold arglist>(
418"#, 444"#,
419 ) 445 )
420 } 446 }
447
448 #[test]
449 fn fold_region() {
450 check(
451 r#"
452// 1. some normal comment
453<fold region>// region: test
454// 2. some normal comment
455calling_function(x,y);
456// endregion: test</fold>
457"#,
458 )
459 }
421} 460}
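The region folding added above pairs `// region:` with `// endregion` comments through a stack, so nested regions close in the right order and each fold spans from its opening marker to the matching closing one. A simplified, self-contained sketch of the same idea over plain lines (not the real syntax tree; all names below are illustrative):

// Sketch only: pair region markers with a LIFO stack to produce fold ranges.
fn region_folds(text: &str) -> Vec<(usize, usize)> {
    let mut starts: Vec<usize> = Vec::new(); // stack of opened regions (line numbers)
    let mut folds = Vec::new();
    for (line_no, line) in text.lines().enumerate() {
        let line = line.trim_start();
        if line.starts_with("// region:") {
            starts.push(line_no);
        } else if line.starts_with("// endregion") {
            if let Some(start) = starts.pop() {
                folds.push((start, line_no));
            }
        }
    }
    folds
}

fn main() {
    let src = "\
// region: outer
fn f() {}
// region: inner
fn g() {}
// endregion: inner
// endregion: outer
";
    // inner region closes first, then the outer one
    assert_eq!(region_folds(src), vec![(2, 4), (0, 5)]);
}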
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 9a605b09d..a9454cfa3 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -5,6 +5,7 @@ use hir::{
5use ide_db::{ 5use ide_db::{
6 base_db::SourceDatabase, 6 base_db::SourceDatabase,
7 defs::{Definition, NameClass, NameRefClass}, 7 defs::{Definition, NameClass, NameRefClass},
8 helpers::FamousDefs,
8 RootDatabase, 9 RootDatabase,
9}; 10};
10use itertools::Itertools; 11use itertools::Itertools;
@@ -107,16 +108,14 @@ pub(crate) fn hover(
107 } 108 }
108 }; 109 };
109 if let Some(definition) = definition { 110 if let Some(definition) = definition {
110 if let Some(markup) = hover_for_definition(db, definition) { 111 let famous_defs = match &definition {
111 let markup = markup.as_str(); 112 Definition::ModuleDef(ModuleDef::BuiltinType(_)) => {
112 let markup = if !markdown { 113 Some(FamousDefs(&sema, sema.scope(&node).krate()))
113 remove_markdown(markup) 114 }
114 } else if links_in_hover { 115 _ => None,
115 rewrite_links(db, markup, &definition) 116 };
116 } else { 117 if let Some(markup) = hover_for_definition(db, definition, famous_defs.as_ref()) {
117 remove_links(markup) 118 res.markup = process_markup(sema.db, definition, &markup, links_in_hover, markdown);
118 };
119 res.markup = Markup::from(markup);
120 if let Some(action) = show_implementations_action(db, definition) { 119 if let Some(action) = show_implementations_action(db, definition) {
121 res.actions.push(action); 120 res.actions.push(action);
122 } 121 }
@@ -138,6 +137,9 @@ pub(crate) fn hover(
138 // don't highlight the entire parent node on comment hover 137 // don't highlight the entire parent node on comment hover
139 return None; 138 return None;
140 } 139 }
140 if let res @ Some(_) = hover_for_keyword(&sema, links_in_hover, markdown, &token) {
141 return res;
142 }
141 143
142 let node = token 144 let node = token
143 .ancestors() 145 .ancestors()
@@ -272,6 +274,24 @@ fn hover_markup(
272 } 274 }
273} 275}
274 276
277fn process_markup(
278 db: &RootDatabase,
279 def: Definition,
280 markup: &Markup,
281 links_in_hover: bool,
282 markdown: bool,
283) -> Markup {
284 let markup = markup.as_str();
285 let markup = if !markdown {
286 remove_markdown(markup)
287 } else if links_in_hover {
288 rewrite_links(db, markup, &def)
289 } else {
290 remove_links(markup)
291 };
292 Markup::from(markup)
293}
294
275fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> { 295fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> {
276 match def { 296 match def {
277 Definition::Field(f) => Some(f.parent_def(db).name(db)), 297 Definition::Field(f) => Some(f.parent_def(db).name(db)),
@@ -304,7 +324,11 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> {
304 def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def))) 324 def.module(db).map(|module| render_path(db, module, definition_owner_name(db, def)))
305} 325}
306 326
307fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> { 327fn hover_for_definition(
328 db: &RootDatabase,
329 def: Definition,
330 famous_defs: Option<&FamousDefs>,
331) -> Option<Markup> {
308 let mod_path = definition_mod_path(db, &def); 332 let mod_path = definition_mod_path(db, &def);
309 return match def { 333 return match def {
310 Definition::Macro(it) => { 334 Definition::Macro(it) => {
@@ -339,7 +363,9 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> {
339 ModuleDef::Static(it) => from_def_source(db, it, mod_path), 363 ModuleDef::Static(it) => from_def_source(db, it, mod_path),
340 ModuleDef::Trait(it) => from_def_source(db, it, mod_path), 364 ModuleDef::Trait(it) => from_def_source(db, it, mod_path),
341 ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path), 365 ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path),
342 ModuleDef::BuiltinType(it) => Some(Markup::fenced_block(&it.name())), 366 ModuleDef::BuiltinType(it) => famous_defs
367 .and_then(|fd| hover_for_builtin(fd, it))
368 .or_else(|| Some(Markup::fenced_block(&it.name()))),
343 }, 369 },
344 Definition::Local(it) => Some(Markup::fenced_block(&it.ty(db).display(db))), 370 Definition::Local(it) => Some(Markup::fenced_block(&it.ty(db).display(db))),
345 Definition::SelfType(impl_def) => { 371 Definition::SelfType(impl_def) => {
@@ -380,11 +406,52 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> {
380 } 406 }
381} 407}
382 408
409fn hover_for_keyword(
410 sema: &Semantics<RootDatabase>,
411 links_in_hover: bool,
412 markdown: bool,
413 token: &SyntaxToken,
414) -> Option<RangeInfo<HoverResult>> {
415 if !token.kind().is_keyword() {
416 return None;
417 }
418 let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()).krate());
419 // std exposes {}_keyword modules with docstrings on the root to document keywords
420 let keyword_mod = format!("{}_keyword", token.text());
421 let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
422 let docs = doc_owner.attrs(sema.db).docs()?;
423 let markup = process_markup(
424 sema.db,
425 Definition::ModuleDef(doc_owner.into()),
426 &hover_markup(Some(docs.into()), Some(token.text().into()), None)?,
427 links_in_hover,
428 markdown,
429 );
430 Some(RangeInfo::new(token.text_range(), HoverResult { markup, actions: Default::default() }))
431}
432
433fn hover_for_builtin(famous_defs: &FamousDefs, builtin: hir::BuiltinType) -> Option<Markup> {
434 // std exposes prim_{} modules with docstrings on the root to document the builtins
435 let primitive_mod = format!("prim_{}", builtin.name());
436 let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
437 let docs = doc_owner.attrs(famous_defs.0.db).docs()?;
438 hover_markup(Some(docs.into()), Some(builtin.name().to_string()), None)
439}
440
441fn find_std_module(famous_defs: &FamousDefs, name: &str) -> Option<hir::Module> {
442 let db = famous_defs.0.db;
443 let std_crate = famous_defs.std()?;
444 let std_root_module = std_crate.root_module(db);
445 std_root_module
446 .children(db)
447 .find(|module| module.name(db).map_or(false, |module| module.to_string() == name))
448}
449
383fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> { 450fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
384 return tokens.max_by_key(priority); 451 return tokens.max_by_key(priority);
385 fn priority(n: &SyntaxToken) -> usize { 452 fn priority(n: &SyntaxToken) -> usize {
386 match n.kind() { 453 match n.kind() {
387 IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] => 3, 454 IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => 3,
388 T!['('] | T![')'] => 2, 455 T!['('] | T![')'] => 2,
389 kind if kind.is_trivia() => 0, 456 kind if kind.is_trivia() => 0,
390 _ => 1, 457 _ => 1,
@@ -3496,4 +3563,75 @@ mod foo$0;
3496 "#]], 3563 "#]],
3497 ); 3564 );
3498 } 3565 }
3566
3567 #[test]
3568 fn hover_self_in_use() {
3569 check(
3570 r#"
3571//! This should not appear
3572mod foo {
3573 /// But this should appear
3574 pub mod bar {}
3575}
3576use foo::bar::{self$0};
3577"#,
3578 expect![[r#"
3579 *self*
3580
3581 ```rust
3582 test::foo
3583 ```
3584
3585 ```rust
3586 pub mod bar
3587 ```
3588
3589 ---
3590
3591 But this should appear
3592 "#]],
3593 )
3594 }
3595
3596 #[test]
3597 fn hover_keyword() {
3598 let ra_fixture = r#"//- /main.rs crate:main deps:std
3599fn f() { retur$0n; }"#;
3600 let fixture = format!("{}\n{}", ra_fixture, FamousDefs::FIXTURE);
3601 check(
3602 &fixture,
3603 expect![[r#"
3604 *return*
3605
3606 ```rust
3607 return
3608 ```
3609
3610 ---
3611
3612 Docs for return_keyword
3613 "#]],
3614 );
3615 }
3616
3617 #[test]
3618 fn hover_builtin() {
3619 let ra_fixture = r#"//- /main.rs crate:main deps:std
3620const _: &str$0 = "";"#;
3621 let fixture = format!("{}\n{}", ra_fixture, FamousDefs::FIXTURE);
3622 check(
3623 &fixture,
3624 expect![[r#"
3625 *str*
3626
3627 ```rust
3628 str
3629 ```
3630
3631 ---
3632
3633 Docs for prim_str
3634 "#]],
3635 );
3636 }
3499} 3637}
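hover_for_keyword and hover_for_builtin above rely on std's convention of documenting keywords and primitives in `<kw>_keyword` and `prim_<type>` modules on the crate root: the token is mapped to such a module name, and the root's children are searched for it. A simplified standalone sketch of that lookup (stand-in data, not rust-analyzer's API):

// Sketch only: map a token to std's doc-module name and find it among root children.
fn doc_module_name(token: &str, is_keyword: bool) -> String {
    if is_keyword {
        format!("{}_keyword", token) // e.g. "return" -> "return_keyword"
    } else {
        format!("prim_{}", token) // e.g. "str" -> "prim_str"
    }
}

fn find_child<'a>(root_children: &'a [&'a str], name: &str) -> Option<&'a str> {
    // mirrors find_std_module: scan the root module's children for a matching name
    root_children.iter().copied().find(|child| *child == name)
}

fn main() {
    let std_root = ["option", "return_keyword", "prim_str"];
    assert_eq!(find_child(&std_root, &doc_module_name("return", true)), Some("return_keyword"));
    assert_eq!(find_child(&std_root, &doc_module_name("str", false)), Some("prim_str"));
}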
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 2c077ed1f..7fcae13e0 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -7,6 +7,7 @@ use syntax::{
7 SyntaxKind::{self, USE_TREE, WHITESPACE}, 7 SyntaxKind::{self, USE_TREE, WHITESPACE},
8 SyntaxNode, SyntaxToken, TextRange, TextSize, T, 8 SyntaxNode, SyntaxToken, TextRange, TextSize, T,
9}; 9};
10use test_utils::mark;
10use text_edit::{TextEdit, TextEditBuilder}; 11use text_edit::{TextEdit, TextEditBuilder};
11 12
12// Feature: Join Lines 13// Feature: Join Lines
@@ -44,9 +45,9 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
44 let text = token.text(); 45 let text = token.text();
45 for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { 46 for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
46 let pos: TextSize = (pos as u32).into(); 47 let pos: TextSize = (pos as u32).into();
47 let off = token.text_range().start() + range.start() + pos; 48 let offset = token.text_range().start() + range.start() + pos;
48 if !edit.invalidates_offset(off) { 49 if !edit.invalidates_offset(offset) {
49 remove_newline(&mut edit, &token, off); 50 remove_newline(&mut edit, &token, offset);
50 } 51 }
51 } 52 }
52 } 53 }
@@ -56,14 +57,25 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
56 57
57fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) { 58fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) {
58 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { 59 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
59 // The node is either the first or the last in the file 60 let mut string_open_quote = false;
60 let suff = &token.text()[TextRange::new( 61 if let Some(string) = ast::String::cast(token.clone()) {
61 offset - token.text_range().start() + TextSize::of('\n'), 62 if let Some(range) = string.open_quote_text_range() {
62 TextSize::of(token.text()), 63 mark::hit!(join_string_literal);
63 )]; 64 string_open_quote = range.end() == offset;
64 let spaces = suff.bytes().take_while(|&b| b == b' ').count(); 65 }
65 66 }
66 edit.replace(TextRange::at(offset, ((spaces + 1) as u32).into()), " ".to_string()); 67
68 let n_spaces_after_line_break = {
69 let suff = &token.text()[TextRange::new(
70 offset - token.text_range().start() + TextSize::of('\n'),
71 TextSize::of(token.text()),
72 )];
73 suff.bytes().take_while(|&b| b == b' ').count()
74 };
75
76 let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into());
77 let replace_with = if string_open_quote { "" } else { " " };
78 edit.replace(range, replace_with.to_string());
67 return; 79 return;
68 } 80 }
69 81
@@ -194,7 +206,7 @@ fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {
194#[cfg(test)] 206#[cfg(test)]
195mod tests { 207mod tests {
196 use syntax::SourceFile; 208 use syntax::SourceFile;
197 use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; 209 use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range, mark};
198 210
199 use super::*; 211 use super::*;
200 212
@@ -771,4 +783,42 @@ fn foo() {
771 ", 783 ",
772 ); 784 );
773 } 785 }
786
787 #[test]
788 fn join_string_literal() {
789 mark::check!(join_string_literal);
790 check_join_lines(
791 r#"
792fn main() {
793 $0"
794hello
795";
796}
797"#,
798 r#"
799fn main() {
800 $0"hello
801";
802}
803"#,
804 );
805
806 check_join_lines(
807 r#"
808fn main() {
809 "
810$0hello
811world
812";
813}
814"#,
815 r#"
816fn main() {
817 "
818$0hello world
819";
820}
821"#,
822 );
823 }
774} 824}
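The join-lines change above deletes the newline outright when it directly follows a string literal's opening quote, instead of collapsing it (plus the following indentation) into a single space. A simplified, string-level sketch of that rule; the real code works on whitespace tokens and TextRanges:

// Sketch only: join the first newline, honoring the open-quote special case.
fn join_at_newline(text: &str, after_open_quote: bool) -> String {
    let nl = match text.find('\n') {
        Some(i) => i,
        None => return text.to_string(),
    };
    // the spaces after the line break are swallowed together with it
    let indent = text[nl + 1..].bytes().take_while(|&b| b == b' ').count();
    let replacement = if after_open_quote { "" } else { " " };
    format!("{}{}{}", &text[..nl], replacement, &text[nl + 1 + indent..])
}

fn main() {
    // newline right after the opening quote: removed with no space inserted
    assert_eq!(join_at_newline("\"\nhello", true), "\"hello");
    // ordinary case: line break plus indentation collapse to one space
    assert_eq!(join_at_newline("hello\n    world", false), "hello world");
}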
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs
index 22ddeeae3..1919639a3 100644
--- a/crates/ide/src/references/rename.rs
+++ b/crates/ide/src/references/rename.rs
@@ -88,6 +88,8 @@ pub(crate) fn rename_with_semantics(
88 let def = find_definition(sema, syntax, position)?; 88 let def = find_definition(sema, syntax, position)?;
89 match def { 89 match def {
90 Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(&sema, module, new_name), 90 Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(&sema, module, new_name),
91 Definition::SelfType(_) => bail!("Cannot rename `Self`"),
92 Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"),
91 def => rename_reference(sema, def, new_name), 93 def => rename_reference(sema, def, new_name),
92 } 94 }
93} 95}
@@ -122,7 +124,7 @@ fn check_identifier(new_name: &str) -> RenameResult<IdentifierKind> {
122 Ok(IdentifierKind::Lifetime) 124 Ok(IdentifierKind::Lifetime)
123 } 125 }
124 (SyntaxKind::LIFETIME_IDENT, _) => { 126 (SyntaxKind::LIFETIME_IDENT, _) => {
125 bail!("Invalid name `{0}`: Cannot rename lifetime to {0}", new_name) 127 bail!("Invalid name `{}`: not a lifetime identifier", new_name)
126 } 128 }
127 (_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error), 129 (_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error),
128 (_, None) => bail!("Invalid name `{}`: not an identifier", new_name), 130 (_, None) => bail!("Invalid name `{}`: not an identifier", new_name),
@@ -162,119 +164,6 @@ fn find_definition(
162 .ok_or_else(|| format_err!("No references found at position")) 164 .ok_or_else(|| format_err!("No references found at position"))
163} 165}
164 166
165fn source_edit_from_references(
166 _sema: &Semantics<RootDatabase>,
167 file_id: FileId,
168 references: &[FileReference],
169 def: Definition,
170 new_name: &str,
171) -> (FileId, TextEdit) {
172 let mut edit = TextEdit::builder();
173 for reference in references {
174 let (range, replacement) = match &reference.name {
175 // if the ranges differ then the node is inside a macro call, we can't really attempt
176 // to make special rewrites like shorthand syntax and such, so just rename the node in
177 // the macro input
178 ast::NameLike::NameRef(name_ref)
179 if name_ref.syntax().text_range() == reference.range =>
180 {
181 source_edit_from_name_ref(name_ref, new_name, def)
182 }
183 ast::NameLike::Name(name) if name.syntax().text_range() == reference.range => {
184 source_edit_from_name(name, new_name)
185 }
186 _ => None,
187 }
188 .unwrap_or_else(|| (reference.range, new_name.to_string()));
189 edit.replace(range, replacement);
190 }
191 (file_id, edit.finish())
192}
193
194fn source_edit_from_name(name: &ast::Name, new_name: &str) -> Option<(TextRange, String)> {
195 if let Some(_) = ast::RecordPatField::for_field_name(name) {
196 if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
197 return Some((
198 TextRange::empty(ident_pat.syntax().text_range().start()),
199 format!("{}: ", new_name),
200 ));
201 }
202 }
203 None
204}
205
206fn source_edit_from_name_ref(
207 name_ref: &ast::NameRef,
208 new_name: &str,
209 def: Definition,
210) -> Option<(TextRange, String)> {
211 if let Some(record_field) = ast::RecordExprField::for_name_ref(name_ref) {
212 let rcf_name_ref = record_field.name_ref();
213 let rcf_expr = record_field.expr();
214 match (rcf_name_ref, rcf_expr.and_then(|it| it.name_ref())) {
215 // field: init-expr, check if we can use a field init shorthand
216 (Some(field_name), Some(init)) => {
217 if field_name == *name_ref {
218 if init.text() == new_name {
219 mark::hit!(test_rename_field_put_init_shorthand);
220 // same names, we can use a shorthand here instead.
221 // we do not want to erase attributes hence this range start
222 let s = field_name.syntax().text_range().start();
223 let e = record_field.syntax().text_range().end();
224 return Some((TextRange::new(s, e), new_name.to_owned()));
225 }
226 } else if init == *name_ref {
227 if field_name.text() == new_name {
228 mark::hit!(test_rename_local_put_init_shorthand);
229 // same names, we can use a shorthand here instead.
230 // we do not want to erase attributes hence this range start
231 let s = field_name.syntax().text_range().start();
232 let e = record_field.syntax().text_range().end();
233 return Some((TextRange::new(s, e), new_name.to_owned()));
234 }
235 }
236 None
237 }
238 // init shorthand
239 // FIXME: instead of splitting the shorthand, recursively trigger a rename of the
240 // other name https://github.com/rust-analyzer/rust-analyzer/issues/6547
241 (None, Some(_)) if matches!(def, Definition::Field(_)) => {
242 mark::hit!(test_rename_field_in_field_shorthand);
243 let s = name_ref.syntax().text_range().start();
244 Some((TextRange::empty(s), format!("{}: ", new_name)))
245 }
246 (None, Some(_)) if matches!(def, Definition::Local(_)) => {
247 mark::hit!(test_rename_local_in_field_shorthand);
248 let s = name_ref.syntax().text_range().end();
249 Some((TextRange::empty(s), format!(": {}", new_name)))
250 }
251 _ => None,
252 }
253 } else if let Some(record_field) = ast::RecordPatField::for_field_name_ref(name_ref) {
254 let rcf_name_ref = record_field.name_ref();
255 let rcf_pat = record_field.pat();
256 match (rcf_name_ref, rcf_pat) {
257 // field: rename
258 (Some(field_name), Some(ast::Pat::IdentPat(pat))) if field_name == *name_ref => {
259 // field name is being renamed
260 if pat.name().map_or(false, |it| it.text() == new_name) {
261 mark::hit!(test_rename_field_put_init_shorthand_pat);
262 // same names, we can use a shorthand here instead/
263 // we do not want to erase attributes hence this range start
264 let s = field_name.syntax().text_range().start();
265 let e = record_field.syntax().text_range().end();
266 Some((TextRange::new(s, e), pat.to_string()))
267 } else {
268 None
269 }
270 }
271 _ => None,
272 }
273 } else {
274 None
275 }
276}
277
278fn rename_mod( 167fn rename_mod(
279 sema: &Semantics<RootDatabase>, 168 sema: &Semantics<RootDatabase>,
280 module: Module, 169 module: Module,
@@ -308,18 +197,75 @@ fn rename_mod(
308 TextEdit::replace(name.syntax().text_range(), new_name.to_string()), 197 TextEdit::replace(name.syntax().text_range(), new_name.to_string()),
309 ), 198 ),
310 _ => unreachable!(), 199 _ => unreachable!(),
311 }; 200 }
312 } 201 }
313 let def = Definition::ModuleDef(ModuleDef::Module(module)); 202 let def = Definition::ModuleDef(ModuleDef::Module(module));
314 let usages = def.usages(sema).all(); 203 let usages = def.usages(sema).all();
315 let ref_edits = usages.iter().map(|(&file_id, references)| { 204 let ref_edits = usages.iter().map(|(&file_id, references)| {
316 source_edit_from_references(sema, file_id, references, def, new_name) 205 (file_id, source_edit_from_references(references, def, new_name))
317 }); 206 });
318 source_change.extend(ref_edits); 207 source_change.extend(ref_edits);
319 208
320 Ok(source_change) 209 Ok(source_change)
321} 210}
322 211
212fn rename_reference(
213 sema: &Semantics<RootDatabase>,
214 def: Definition,
215 new_name: &str,
216) -> RenameResult<SourceChange> {
217 let ident_kind = check_identifier(new_name)?;
218
219 let def_is_lbl_or_lt = matches!(
220 def,
221 Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_)
222 );
223 match (ident_kind, def) {
224 (IdentifierKind::ToSelf, _)
225 | (IdentifierKind::Underscore, _)
226 | (IdentifierKind::Ident, _)
227 if def_is_lbl_or_lt =>
228 {
229 mark::hit!(rename_not_a_lifetime_ident_ref);
230 bail!("Invalid name `{}`: not a lifetime identifier", new_name)
231 }
232 (IdentifierKind::Lifetime, _) if def_is_lbl_or_lt => mark::hit!(rename_lifetime),
233 (IdentifierKind::Lifetime, _) => {
234 mark::hit!(rename_not_an_ident_ref);
235 bail!("Invalid name `{}`: not an identifier", new_name)
236 }
237 (IdentifierKind::ToSelf, Definition::Local(local)) if local.is_self(sema.db) => {
238 // no-op
239 mark::hit!(rename_self_to_self);
240 return Ok(SourceChange::default());
241 }
242 (ident_kind, Definition::Local(local)) if local.is_self(sema.db) => {
243 mark::hit!(rename_self_to_param);
244 return rename_self_to_param(sema, local, new_name, ident_kind);
245 }
246 (IdentifierKind::ToSelf, Definition::Local(local)) => {
247 mark::hit!(rename_to_self);
248 return rename_to_self(sema, local);
249 }
250 (IdentifierKind::ToSelf, _) => bail!("Invalid name `{}`: not an identifier", new_name),
251 (IdentifierKind::Ident, _) | (IdentifierKind::Underscore, _) => mark::hit!(rename_ident),
252 }
253
254 let usages = def.usages(sema).all();
255 if !usages.is_empty() && ident_kind == IdentifierKind::Underscore {
256 mark::hit!(rename_underscore_multiple);
257 bail!("Cannot rename reference to `_` as it is being referenced multiple times");
258 }
259 let mut source_change = SourceChange::default();
260 source_change.extend(usages.iter().map(|(&file_id, references)| {
261 (file_id, source_edit_from_references(&references, def, new_name))
262 }));
263
264 let (file_id, edit) = source_edit_from_def(sema, def, new_name)?;
265 source_change.insert_source_edit(file_id, edit);
266 Ok(source_change)
267}
268
323fn rename_to_self(sema: &Semantics<RootDatabase>, local: hir::Local) -> RenameResult<SourceChange> { 269fn rename_to_self(sema: &Semantics<RootDatabase>, local: hir::Local) -> RenameResult<SourceChange> {
324 if never!(local.is_self(sema.db)) { 270 if never!(local.is_self(sema.db)) {
325 bail!("rename_to_self invoked on self"); 271 bail!("rename_to_self invoked on self");
@@ -384,7 +330,7 @@ fn rename_to_self(sema: &Semantics<RootDatabase>, local: hir::Local) -> RenameRe
384 let usages = def.usages(sema).all(); 330 let usages = def.usages(sema).all();
385 let mut source_change = SourceChange::default(); 331 let mut source_change = SourceChange::default();
386 source_change.extend(usages.iter().map(|(&file_id, references)| { 332 source_change.extend(usages.iter().map(|(&file_id, references)| {
387 source_edit_from_references(sema, file_id, references, def, "self") 333 (file_id, source_edit_from_references(references, def, "self"))
388 })); 334 }));
389 source_change.insert_source_edit( 335 source_change.insert_source_edit(
390 file_id.original_file(sema.db), 336 file_id.original_file(sema.db),
@@ -394,29 +340,6 @@ fn rename_to_self(sema: &Semantics<RootDatabase>, local: hir::Local) -> RenameRe
394 Ok(source_change) 340 Ok(source_change)
395} 341}
396 342
397fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Option<TextEdit> {
398 fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
399 if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
400 return Some(p.path()?.segment()?.name_ref()?.text().to_string());
401 }
402 None
403 }
404
405 let impl_def = self_param.syntax().ancestors().find_map(|it| ast::Impl::cast(it))?;
406 let type_name = target_type_name(&impl_def)?;
407
408 let mut replacement_text = String::from(new_name);
409 replacement_text.push_str(": ");
410 match (self_param.amp_token(), self_param.mut_token()) {
411 (None, None) => (),
412 (Some(_), None) => replacement_text.push('&'),
413 (_, Some(_)) => replacement_text.push_str("&mut "),
414 };
415 replacement_text.push_str(type_name.as_str());
416
417 Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
418}
419
420fn rename_self_to_param( 343fn rename_self_to_param(
421 sema: &Semantics<RootDatabase>, 344 sema: &Semantics<RootDatabase>,
422 local: hir::Local, 345 local: hir::Local,
@@ -441,66 +364,143 @@ fn rename_self_to_param(
441 let mut source_change = SourceChange::default(); 364 let mut source_change = SourceChange::default();
442 source_change.insert_source_edit(file_id.original_file(sema.db), edit); 365 source_change.insert_source_edit(file_id.original_file(sema.db), edit);
443 source_change.extend(usages.iter().map(|(&file_id, references)| { 366 source_change.extend(usages.iter().map(|(&file_id, references)| {
444 source_edit_from_references(sema, file_id, &references, def, new_name) 367 (file_id, source_edit_from_references(&references, def, new_name))
445 })); 368 }));
446 Ok(source_change) 369 Ok(source_change)
447} 370}
448 371
449fn rename_reference( 372fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Option<TextEdit> {
450 sema: &Semantics<RootDatabase>, 373 fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
374 if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
375 return Some(p.path()?.segment()?.name_ref()?.text().to_string());
376 }
377 None
378 }
379
380 let impl_def = self_param.syntax().ancestors().find_map(|it| ast::Impl::cast(it))?;
381 let type_name = target_type_name(&impl_def)?;
382
383 let mut replacement_text = String::from(new_name);
384 replacement_text.push_str(": ");
385 match (self_param.amp_token(), self_param.mut_token()) {
386 (Some(_), None) => replacement_text.push('&'),
387 (Some(_), Some(_)) => replacement_text.push_str("&mut "),
388 (_, _) => (),
389 };
390 replacement_text.push_str(type_name.as_str());
391
392 Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
393}
394
395fn source_edit_from_references(
396 references: &[FileReference],
451 def: Definition, 397 def: Definition,
452 new_name: &str, 398 new_name: &str,
453) -> RenameResult<SourceChange> { 399) -> TextEdit {
454 let ident_kind = check_identifier(new_name)?; 400 let mut edit = TextEdit::builder();
455 401 for reference in references {
456 let def_is_lbl_or_lt = matches!( 402 let (range, replacement) = match &reference.name {
457 def, 403 // if the ranges differ then the node is inside a macro call, we can't really attempt
458 Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_) 404 // to make special rewrites like shorthand syntax and such, so just rename the node in
459 ); 405 // the macro input
460 match (ident_kind, def) { 406 ast::NameLike::NameRef(name_ref)
461 (IdentifierKind::ToSelf, _) 407 if name_ref.syntax().text_range() == reference.range =>
462 | (IdentifierKind::Underscore, _) 408 {
463 | (IdentifierKind::Ident, _) 409 source_edit_from_name_ref(name_ref, new_name, def)
464 if def_is_lbl_or_lt => 410 }
465 { 411 ast::NameLike::Name(name) if name.syntax().text_range() == reference.range => {
466 mark::hit!(rename_not_a_lifetime_ident_ref); 412 source_edit_from_name(name, new_name)
467 bail!("Invalid name `{}`: not a lifetime identifier", new_name) 413 }
468 } 414 _ => None,
469 (IdentifierKind::Lifetime, _) if def_is_lbl_or_lt => mark::hit!(rename_lifetime),
470 (IdentifierKind::Lifetime, _) => {
471 mark::hit!(rename_not_an_ident_ref);
472 bail!("Invalid name `{}`: not an identifier", new_name)
473 }
474 (IdentifierKind::ToSelf, Definition::Local(local)) if local.is_self(sema.db) => {
475 // no-op
476 mark::hit!(rename_self_to_self);
477 return Ok(SourceChange::default());
478 }
479 (ident_kind, Definition::Local(local)) if local.is_self(sema.db) => {
480 mark::hit!(rename_self_to_param);
481 return rename_self_to_param(sema, local, new_name, ident_kind);
482 }
483 (IdentifierKind::ToSelf, Definition::Local(local)) => {
484 mark::hit!(rename_to_self);
485 return rename_to_self(sema, local);
486 } 415 }
487 (IdentifierKind::ToSelf, _) => bail!("Invalid name `{}`: not an identifier", new_name), 416 .unwrap_or_else(|| (reference.range, new_name.to_string()));
488 (IdentifierKind::Ident, _) | (IdentifierKind::Underscore, _) => mark::hit!(rename_ident), 417 edit.replace(range, replacement);
489 } 418 }
419 edit.finish()
420}
490 421
491 let usages = def.usages(sema).all(); 422fn source_edit_from_name(name: &ast::Name, new_name: &str) -> Option<(TextRange, String)> {
492 if !usages.is_empty() && ident_kind == IdentifierKind::Underscore { 423 if let Some(_) = ast::RecordPatField::for_field_name(name) {
493 mark::hit!(rename_underscore_multiple); 424 if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
494 bail!("Cannot rename reference to `_` as it is being referenced multiple times"); 425 return Some((
426 TextRange::empty(ident_pat.syntax().text_range().start()),
427 [new_name, ": "].concat(),
428 ));
429 }
495 } 430 }
496 let mut source_change = SourceChange::default(); 431 None
497 source_change.extend(usages.iter().map(|(&file_id, references)| { 432}
498 source_edit_from_references(sema, file_id, &references, def, new_name)
499 }));
500 433
501 let (file_id, edit) = source_edit_from_def(sema, def, new_name)?; 434fn source_edit_from_name_ref(
502 source_change.insert_source_edit(file_id, edit); 435 name_ref: &ast::NameRef,
503 Ok(source_change) 436 new_name: &str,
437 def: Definition,
438) -> Option<(TextRange, String)> {
439 if let Some(record_field) = ast::RecordExprField::for_name_ref(name_ref) {
440 let rcf_name_ref = record_field.name_ref();
441 let rcf_expr = record_field.expr();
442 match (rcf_name_ref, rcf_expr.and_then(|it| it.name_ref())) {
443 // field: init-expr, check if we can use a field init shorthand
444 (Some(field_name), Some(init)) => {
445 if field_name == *name_ref {
446 if init.text() == new_name {
447 mark::hit!(test_rename_field_put_init_shorthand);
448 // same names, we can use a shorthand here instead.
449 // we do not want to erase attributes hence this range start
450 let s = field_name.syntax().text_range().start();
451 let e = record_field.syntax().text_range().end();
452 return Some((TextRange::new(s, e), new_name.to_owned()));
453 }
454 } else if init == *name_ref {
455 if field_name.text() == new_name {
456 mark::hit!(test_rename_local_put_init_shorthand);
457 // same names, we can use a shorthand here instead.
458 // we do not want to erase attributes hence this range start
459 let s = field_name.syntax().text_range().start();
460 let e = record_field.syntax().text_range().end();
461 return Some((TextRange::new(s, e), new_name.to_owned()));
462 }
463 }
464 None
465 }
466 // init shorthand
467 // FIXME: instead of splitting the shorthand, recursively trigger a rename of the
468 // other name https://github.com/rust-analyzer/rust-analyzer/issues/6547
469 (None, Some(_)) if matches!(def, Definition::Field(_)) => {
470 mark::hit!(test_rename_field_in_field_shorthand);
471 let s = name_ref.syntax().text_range().start();
472 Some((TextRange::empty(s), format!("{}: ", new_name)))
473 }
474 (None, Some(_)) if matches!(def, Definition::Local(_)) => {
475 mark::hit!(test_rename_local_in_field_shorthand);
476 let s = name_ref.syntax().text_range().end();
477 Some((TextRange::empty(s), format!(": {}", new_name)))
478 }
479 _ => None,
480 }
481 } else if let Some(record_field) = ast::RecordPatField::for_field_name_ref(name_ref) {
482 let rcf_name_ref = record_field.name_ref();
483 let rcf_pat = record_field.pat();
484 match (rcf_name_ref, rcf_pat) {
485 // field: rename
486 (Some(field_name), Some(ast::Pat::IdentPat(pat))) if field_name == *name_ref => {
487 // field name is being renamed
488 if pat.name().map_or(false, |it| it.text() == new_name) {
489 mark::hit!(test_rename_field_put_init_shorthand_pat);
490 // same names, we can use a shorthand here instead.
491 // we do not want to erase attributes hence this range start
492 let s = field_name.syntax().text_range().start();
493 let e = record_field.syntax().text_range().end();
494 Some((TextRange::new(s, e), pat.to_string()))
495 } else {
496 None
497 }
498 }
499 _ => None,
500 }
501 } else {
502 None
503 }
504} 504}
505 505
506fn source_edit_from_def( 506fn source_edit_from_def(
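text_edit_from_self_param, moved above, rebuilds the first parameter from the `self` token's `&`/`mut` markers plus the impl's self type, and the reordered match now only emits `&mut ` when both tokens are present. A standalone sketch of just that replacement-string logic, with simplified boolean inputs in place of the syntax tokens:

// Sketch only: compute the `self` -> named-parameter replacement text.
fn self_param_replacement(new_name: &str, has_amp: bool, has_mut: bool, type_name: &str) -> String {
    let mut out = format!("{}: ", new_name);
    match (has_amp, has_mut) {
        (true, false) => out.push('&'),
        (true, true) => out.push_str("&mut "),
        _ => (), // plain `self` (or `mut self`) takes the type by value
    }
    out.push_str(type_name);
    out
}

fn main() {
    assert_eq!(self_param_replacement("this", true, true, "Foo"), "this: &mut Foo");
    assert_eq!(self_param_replacement("this", true, false, "Foo"), "this: &Foo");
    assert_eq!(self_param_replacement("this", false, false, "Foo"), "this: Foo");
}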
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 1e7baed20..65f60891e 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -189,7 +189,7 @@ pub(crate) fn doc_owner_to_def(
189) -> Option<Definition> { 189) -> Option<Definition> {
190 let res: hir::ModuleDef = match_ast! { 190 let res: hir::ModuleDef = match_ast! {
191 match item { 191 match item {
192 ast::SourceFile(it) => sema.scope(&item).module()?.into(), 192 ast::SourceFile(_it) => sema.scope(&item).module()?.into(),
193 ast::Fn(it) => sema.to_def(&it)?.into(), 193 ast::Fn(it) => sema.to_def(&it)?.into(),
194 ast::Struct(it) => sema.to_def(&it)?.into(), 194 ast::Struct(it) => sema.to_def(&it)?.into(),
195 ast::Enum(it) => sema.to_def(&it)?.into(), 195 ast::Enum(it) => sema.to_def(&it)?.into(),
diff --git a/crates/ide_assists/src/handlers/add_turbo_fish.rs b/crates/ide_assists/src/handlers/add_turbo_fish.rs
index 8e9ea4fad..a08b55ebb 100644
--- a/crates/ide_assists/src/handlers/add_turbo_fish.rs
+++ b/crates/ide_assists/src/handlers/add_turbo_fish.rs
@@ -31,6 +31,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
31 return None; 31 return None;
32 } 32 }
33 mark::hit!(add_turbo_fish_after_call); 33 mark::hit!(add_turbo_fish_after_call);
34 mark::hit!(add_type_ascription_after_call);
34 arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT) 35 arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
35 })?; 36 })?;
36 let next_token = ident.next_token()?; 37 let next_token = ident.next_token()?;
@@ -52,6 +53,24 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
52 mark::hit!(add_turbo_fish_non_generic); 53 mark::hit!(add_turbo_fish_non_generic);
53 return None; 54 return None;
54 } 55 }
56
57 if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
58 if let_stmt.colon_token().is_none() {
59 let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end();
60 acc.add(
61 AssistId("add_type_ascription", AssistKind::RefactorRewrite),
62 "Add `: _` before assignment operator",
63 ident.text_range(),
64 |builder| match ctx.config.snippet_cap {
65 Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"),
66 None => builder.insert(type_pos, ": _"),
67 },
68 )?
69 } else {
70 mark::hit!(add_type_ascription_already_typed);
71 }
72 }
73
55 acc.add( 74 acc.add(
56 AssistId("add_turbo_fish", AssistKind::RefactorRewrite), 75 AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
57 "Add `::<>`", 76 "Add `::<>`",
@@ -65,7 +84,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<(
65 84
66#[cfg(test)] 85#[cfg(test)]
67mod tests { 86mod tests {
68 use crate::tests::{check_assist, check_assist_not_applicable}; 87 use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
69 88
70 use super::*; 89 use super::*;
71 use test_utils::mark; 90 use test_utils::mark;
@@ -161,4 +180,91 @@ fn main() {
161"#, 180"#,
162 ); 181 );
163 } 182 }
183
184 #[test]
185 fn add_type_ascription_function() {
186 check_assist_by_label(
187 add_turbo_fish,
188 r#"
189fn make<T>() -> T {}
190fn main() {
191 let x = make$0();
192}
193"#,
194 r#"
195fn make<T>() -> T {}
196fn main() {
197 let x: ${0:_} = make();
198}
199"#,
200 "Add `: _` before assignment operator",
201 );
202 }
203
204 #[test]
205 fn add_type_ascription_after_call() {
206 mark::check!(add_type_ascription_after_call);
207 check_assist_by_label(
208 add_turbo_fish,
209 r#"
210fn make<T>() -> T {}
211fn main() {
212 let x = make()$0;
213}
214"#,
215 r#"
216fn make<T>() -> T {}
217fn main() {
218 let x: ${0:_} = make();
219}
220"#,
221 "Add `: _` before assignment operator",
222 );
223 }
224
225 #[test]
226 fn add_type_ascription_method() {
227 check_assist_by_label(
228 add_turbo_fish,
229 r#"
230struct S;
231impl S {
232 fn make<T>(&self) -> T {}
233}
234fn main() {
235 let x = S.make$0();
236}
237"#,
238 r#"
239struct S;
240impl S {
241 fn make<T>(&self) -> T {}
242}
243fn main() {
244 let x: ${0:_} = S.make();
245}
246"#,
247 "Add `: _` before assignment operator",
248 );
249 }
250
251 #[test]
252 fn add_type_ascription_already_typed() {
253 mark::check!(add_type_ascription_already_typed);
254 check_assist(
255 add_turbo_fish,
256 r#"
257fn make<T>() -> T {}
258fn main() {
259 let x: () = make$0();
260}
261"#,
262 r#"
263fn make<T>() -> T {}
264fn main() {
265 let x: () = make::<${0:_}>();
266}
267"#,
268 );
269 }
164} 270}
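The new branch above offers "Add `: _` before assignment operator" when a `let` binding has no type ascription, inserting either a plain `: _` or a `${0:_}` snippet placeholder depending on snippet support. A toy, line-based sketch of that edit (the assist itself operates on the AST and respects cursor position):

// Sketch only: insert a type ascription after the pattern of an un-ascribed `let`.
fn add_type_ascription(line: &str, snippet: bool) -> Option<String> {
    let eq = line.find('=')?;
    let pat_end = line[..eq].trim_end().len();
    if line[..pat_end].contains(':') {
        return None; // already ascribed, nothing to do
    }
    let ascription = if snippet { ": ${0:_}" } else { ": _" };
    Some(format!("{}{}{}", &line[..pat_end], ascription, &line[pat_end..]))
}

fn main() {
    assert_eq!(
        add_type_ascription("let x = make();", true).as_deref(),
        Some("let x: ${0:_} = make();")
    );
    assert_eq!(add_type_ascription("let x: () = make();", false), None);
}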
diff --git a/crates/ide_assists/src/handlers/apply_demorgan.rs b/crates/ide_assists/src/handlers/apply_demorgan.rs
index 6997ea048..128b1eb56 100644
--- a/crates/ide_assists/src/handlers/apply_demorgan.rs
+++ b/crates/ide_assists/src/handlers/apply_demorgan.rs
@@ -1,4 +1,5 @@
1use syntax::ast::{self, AstNode}; 1use syntax::ast::{self, AstNode};
2use test_utils::mark;
2 3
3use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists}; 4use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};
4 5
@@ -43,9 +44,36 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext) -> Option<(
43 "Apply De Morgan's law", 44 "Apply De Morgan's law",
44 op_range, 45 op_range,
45 |edit| { 46 |edit| {
47 let paren_expr = expr.syntax().parent().and_then(|parent| ast::ParenExpr::cast(parent));
48
49 let neg_expr = paren_expr
50 .clone()
51 .and_then(|paren_expr| paren_expr.syntax().parent())
52 .and_then(|parent| ast::PrefixExpr::cast(parent))
53 .and_then(|prefix_expr| {
54 if prefix_expr.op_kind().unwrap() == ast::PrefixOp::Not {
55 Some(prefix_expr)
56 } else {
57 None
58 }
59 });
60
46 edit.replace(op_range, opposite_op); 61 edit.replace(op_range, opposite_op);
47 edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text())); 62
48 edit.replace(rhs_range, format!("{})", not_rhs.syntax().text())); 63 if let Some(paren_expr) = paren_expr {
64 edit.replace(lhs_range, not_lhs.syntax().text());
65 edit.replace(rhs_range, not_rhs.syntax().text());
66 if let Some(neg_expr) = neg_expr {
67 mark::hit!(demorgan_double_negation);
68 edit.replace(neg_expr.op_token().unwrap().text_range(), "");
69 } else {
70 mark::hit!(demorgan_double_parens);
71 edit.replace(paren_expr.l_paren_token().unwrap().text_range(), "!(");
72 }
73 } else {
74 edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text()));
75 edit.replace(rhs_range, format!("{})", not_rhs.syntax().text()));
76 }
49 }, 77 },
50 ) 78 )
51} 79}
@@ -62,6 +90,7 @@ fn opposite_logic_op(kind: ast::BinOp) -> Option<&'static str> {
62#[cfg(test)] 90#[cfg(test)]
63mod tests { 91mod tests {
64 use ide_db::helpers::FamousDefs; 92 use ide_db::helpers::FamousDefs;
93 use test_utils::mark;
65 94
66 use super::*; 95 use super::*;
67 96
@@ -156,4 +185,16 @@ fn f() {
156 fn demorgan_doesnt_apply_with_cursor_not_on_op() { 185 fn demorgan_doesnt_apply_with_cursor_not_on_op() {
157 check_assist_not_applicable(apply_demorgan, "fn f() { $0 !x || !x }") 186 check_assist_not_applicable(apply_demorgan, "fn f() { $0 !x || !x }")
158 } 187 }
188
189 #[test]
190 fn demorgan_doesnt_double_negation() {
191 mark::check!(demorgan_double_negation);
192 check_assist(apply_demorgan, "fn f() { !(x ||$0 x) }", "fn f() { (!x && !x) }")
193 }
194
195 #[test]
196 fn demorgan_doesnt_double_parens() {
197 mark::check!(demorgan_double_parens);
198 check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }")
199 }
159} 200}
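The parenthesis handling added above keeps De Morgan from stacking a second `!(...)` when the `||` expression is already parenthesized or negated: an existing outer `!` is consumed, and existing parens are reused. A small standalone sketch of the resulting output shapes (string-level only; operand names are placeholders):

// Sketch only: the three output shapes for `x || y` under De Morgan.
fn demorgan_or(lhs: &str, rhs: &str, in_parens: bool, negated: bool) -> String {
    let inverted = format!("!{} && !{}", lhs, rhs);
    match (in_parens, negated) {
        // `!(x || y)` -> the outer `!` is consumed and the parens reused: `(!x && !y)`
        (true, true) => format!("({})", inverted),
        // `(x || y)` -> keep the parens but negate the whole expression: `!(!x && !y)`
        (true, false) => format!("!({})", inverted),
        // bare `x || y` -> wrap and negate, as the assist did before this change
        _ => format!("!({})", inverted),
    }
}

fn main() {
    assert_eq!(demorgan_or("x", "y", true, true), "(!x && !y)");
    assert_eq!(demorgan_or("x", "y", true, false), "!(!x && !y)");
    assert_eq!(demorgan_or("x", "y", false, false), "!(!x && !y)");
}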
diff --git a/crates/ide_assists/src/handlers/convert_comment_block.rs b/crates/ide_assists/src/handlers/convert_comment_block.rs
new file mode 100644
index 000000000..cdc45fc42
--- /dev/null
+++ b/crates/ide_assists/src/handlers/convert_comment_block.rs
@@ -0,0 +1,419 @@
1use itertools::Itertools;
2use std::convert::identity;
3use syntax::{
4 ast::{
5 self,
6 edit::IndentLevel,
7 Comment, CommentKind,
8 CommentPlacement::{Inner, Outer},
9 CommentShape::{self, Block, Line},
10 Whitespace,
11 },
12 AstToken, Direction, SyntaxElement, TextRange,
13};
14
15use crate::{AssistContext, AssistId, AssistKind, Assists};
16
17/// Assist: line_to_block
18///
19/// Converts comments between block and single-line form
20///
21/// ```
22/// // Multi-line
23/// // comment
24/// ```
25/// ->
26/// ```
27/// /**
28/// Multi-line
29/// comment
30/// */
31/// ```
32pub(crate) fn convert_comment_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
33 if let Some(comment) = ctx.find_token_at_offset::<ast::Comment>() {
34 // Only allow comments which are alone on their line
35 if let Some(prev) = comment.syntax().prev_token() {
36 if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
37 return None;
38 }
39 }
40
41 return match comment.kind().shape {
42 ast::CommentShape::Block => block_to_line(acc, comment),
43 ast::CommentShape::Line => line_to_block(acc, comment),
44 };
45 }
46
47 return None;
48}
49
50fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
51 let target = comment.syntax().text_range();
52
53 acc.add(
54 AssistId("block_to_line", AssistKind::RefactorRewrite),
55 "Replace block comment with line comments",
56 target,
57 |edit| {
58 let indentation = IndentLevel::from_token(comment.syntax());
59 let line_prefix =
60 comment_kind_prefix(CommentKind { shape: CommentShape::Line, ..comment.kind() });
61
62 let text = comment.text();
63 let text = &text[comment.prefix().len()..(text.len() - "*/".len())].trim();
64
65 let lines = text.lines().peekable();
66
67 let indent_spaces = indentation.to_string();
68 let output = lines
69 .map(|l| l.trim_start_matches(&indent_spaces))
70 .map(|l| {
71 // Don't introduce trailing whitespace
72 if l.is_empty() {
73 line_prefix.to_string()
74 } else {
75 format!("{} {}", line_prefix, l.trim_start_matches(&indent_spaces))
76 }
77 })
78 .join(&format!("\n{}", indent_spaces));
79
80 edit.replace(target, output)
81 },
82 )
83}
84
85fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
86 // Find all the comments we'll be collapsing into a block
87 let comments = relevant_line_comments(&comment);
88
89 // Establish the target of our edit based on the comments we found
90 let target = TextRange::new(
91 comments[0].syntax().text_range().start(),
92 comments.last().unwrap().syntax().text_range().end(),
93 );
94
95 acc.add(
96 AssistId("line_to_block", AssistKind::RefactorRewrite),
97 "Replace line comments with a single block comment",
98 target,
99 |edit| {
100 // We pick a single indentation level for the whole block comment based on the
101 // comment where the assist was invoked. This will be prepended to the
102 // contents of each line comment when they're put into the block comment.
103 let indentation = IndentLevel::from_token(&comment.syntax());
104
105 let block_comment_body =
106 comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
107
108 let block_prefix =
109 comment_kind_prefix(CommentKind { shape: CommentShape::Block, ..comment.kind() });
110
111 let output =
112 format!("{}\n{}\n{}*/", block_prefix, block_comment_body, indentation.to_string());
113
114 edit.replace(target, output)
115 },
116 )
117}
118
119/// The line -> block assist can be invoked from anywhere within a sequence of line comments.
120/// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will
121/// be joined.
122fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
123 // The prefix identifies the kind of comment we're dealing with
124 let prefix = comment.prefix();
125 let same_prefix = |c: &ast::Comment| c.prefix() == prefix;
126
127 // These tokens are allowed to exist between comments
128 let skippable = |not: &SyntaxElement| {
129 not.clone()
130 .into_token()
131 .and_then(Whitespace::cast)
132 .map(|w| !w.spans_multiple_lines())
133 .unwrap_or(false)
134 };
135
136 // Find all preceding comments (in reverse order) that have the same prefix
137 let prev_comments = comment
138 .syntax()
139 .siblings_with_tokens(Direction::Prev)
140 .filter(|s| !skippable(s))
141 .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
142 .take_while(|opt_com| opt_com.is_some())
143 .filter_map(identity)
144 .skip(1); // skip the first element so we don't duplicate it in next_comments
145
146 let next_comments = comment
147 .syntax()
148 .siblings_with_tokens(Direction::Next)
149 .filter(|s| !skippable(s))
150 .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
151 .take_while(|opt_com| opt_com.is_some())
152 .filter_map(identity);
153
154 let mut comments: Vec<_> = prev_comments.collect();
155 comments.reverse();
156 comments.extend(next_comments);
157 comments
158}
159
160// Line comments usually begin with a single space character following the prefix as seen here:
161//^
162// But comments can also include indented text:
163// > Hello there
164//
165// We handle this by stripping *AT MOST* one space character from the start of the line
166// This has its own problems because it can cause alignment issues:
167//
168// /*
169// a ----> a
170//b ----> b
171// */
172//
173// But since such comments aren't idiomatic we're okay with this.
174fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
175 let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
176 let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
177
178 // Don't add the indentation if the line is empty
179 if contents.is_empty() {
180 contents.to_owned()
181 } else {
182 indentation.to_string() + &contents
183 }
184}
185
186fn comment_kind_prefix(ck: ast::CommentKind) -> &'static str {
187 match (ck.shape, ck.doc) {
188 (Line, Some(Inner)) => "//!",
189 (Line, Some(Outer)) => "///",
190 (Line, None) => "//",
191 (Block, Some(Inner)) => "/*!",
192 (Block, Some(Outer)) => "/**",
193 (Block, None) => "/*",
194 }
195}
196
197#[cfg(test)]
198mod tests {
199 use crate::tests::{check_assist, check_assist_not_applicable};
200
201 use super::*;
202
203 #[test]
204 fn single_line_to_block() {
205 check_assist(
206 convert_comment_block,
207 r#"
208// line$0 comment
209fn main() {
210 foo();
211}
212"#,
213 r#"
214/*
215line comment
216*/
217fn main() {
218 foo();
219}
220"#,
221 );
222 }
223
224 #[test]
225 fn single_line_to_block_indented() {
226 check_assist(
227 convert_comment_block,
228 r#"
229fn main() {
230 // line$0 comment
231 foo();
232}
233"#,
234 r#"
235fn main() {
236 /*
237 line comment
238 */
239 foo();
240}
241"#,
242 );
243 }
244
245 #[test]
246 fn multiline_to_block() {
247 check_assist(
248 convert_comment_block,
249 r#"
250fn main() {
251 // above
252 // line$0 comment
253 //
254 // below
255 foo();
256}
257"#,
258 r#"
259fn main() {
260 /*
261 above
262 line comment
263
264 below
265 */
266 foo();
267}
268"#,
269 );
270 }
271
272 #[test]
273 fn end_of_line_to_block() {
274 check_assist_not_applicable(
275 convert_comment_block,
276 r#"
277fn main() {
278 foo(); // end-of-line$0 comment
279}
280"#,
281 );
282 }
283
284 #[test]
285 fn single_line_different_kinds() {
286 check_assist(
287 convert_comment_block,
288 r#"
289fn main() {
290 /// different prefix
291 // line$0 comment
292 // below
293 foo();
294}
295"#,
296 r#"
297fn main() {
298 /// different prefix
299 /*
300 line comment
301 below
302 */
303 foo();
304}
305"#,
306 );
307 }
308
309 #[test]
310 fn single_line_separate_chunks() {
311 check_assist(
312 convert_comment_block,
313 r#"
314fn main() {
315 // different chunk
316
317 // line$0 comment
318 // below
319 foo();
320}
321"#,
322 r#"
323fn main() {
324 // different chunk
325
326 /*
327 line comment
328 below
329 */
330 foo();
331}
332"#,
333 );
334 }
335
336 #[test]
337 fn doc_block_comment_to_lines() {
338 check_assist(
339 convert_comment_block,
340 r#"
341/**
342 hi$0 there
343*/
344"#,
345 r#"
346/// hi there
347"#,
348 );
349 }
350
351 #[test]
352 fn block_comment_to_lines() {
353 check_assist(
354 convert_comment_block,
355 r#"
356/*
357 hi$0 there
358*/
359"#,
360 r#"
361// hi there
362"#,
363 );
364 }
365
366 #[test]
367 fn inner_doc_block_to_lines() {
368 check_assist(
369 convert_comment_block,
370 r#"
371/*!
372 hi$0 there
373*/
374"#,
375 r#"
376//! hi there
377"#,
378 );
379 }
380
381 #[test]
382 fn block_to_lines_indent() {
383 check_assist(
384 convert_comment_block,
385 r#"
386fn main() {
387 /*!
388 hi$0 there
389
390 ```
391 code_sample
392 ```
393 */
394}
395"#,
396 r#"
397fn main() {
398 //! hi there
399 //!
400 //! ```
401 //! code_sample
402 //! ```
403}
404"#,
405 );
406 }
407
408 #[test]
409 fn end_of_line_block_to_line() {
410 check_assist_not_applicable(
411 convert_comment_block,
412 r#"
413fn main() {
414 foo(); /* end-of-line$0 comment */
415}
416"#,
417 );
418 }
419}
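Aside: the block-to-line direction above boils down to a plain string transform: strip the block prefix and the trailing `*/`, trim, then re-emit each line with the line prefix and the invocation's indentation. A minimal standalone sketch of that transform, independent of rust-analyzer's `ast`/`IndentLevel` types (the helper name and the hard-coded `//` prefix are illustrative only):

```rust
/// Minimal sketch: turn the body of a `/* ... */` comment into `//` lines.
/// Mirrors the shape of `block_to_line` above, but works on plain strings.
fn block_to_line_text(comment: &str, indent: &str) -> String {
    let body = comment
        .strip_prefix("/*")
        .and_then(|s| s.strip_suffix("*/"))
        .unwrap_or(comment)
        .trim();
    let sep = format!("\n{}", indent);
    body.lines()
        .map(|l| l.trim_start_matches(indent))
        // Don't introduce trailing whitespace on empty lines.
        .map(|l| if l.is_empty() { "//".to_string() } else { format!("// {}", l) })
        .collect::<Vec<_>>()
        .join(sep.as_str())
}

fn main() {
    let block = "/*\n    hi there\n    below\n*/";
    assert_eq!(block_to_line_text(block, "    "), "// hi there\n    // below");
}
```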
diff --git a/crates/ide_assists/src/handlers/extract_variable.rs b/crates/ide_assists/src/handlers/extract_variable.rs
index 98f3dc6ca..312ac7ac4 100644
--- a/crates/ide_assists/src/handlers/extract_variable.rs
+++ b/crates/ide_assists/src/handlers/extract_variable.rs
@@ -8,7 +8,7 @@ use syntax::{
8}; 8};
9use test_utils::mark; 9use test_utils::mark;
10 10
11use crate::{AssistContext, AssistId, AssistKind, Assists}; 11use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
12 12
13// Assist: extract_variable 13// Assist: extract_variable
14// 14//
@@ -54,7 +54,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
54 54
55 let var_name = match &field_shorthand { 55 let var_name = match &field_shorthand {
56 Some(it) => it.to_string(), 56 Some(it) => it.to_string(),
57 None => "var_name".to_string(), 57 None => suggest_name::variable(&to_extract, &ctx.sema),
58 }; 58 };
59 let expr_range = match &field_shorthand { 59 let expr_range = match &field_shorthand {
60 Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()), 60 Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()),
@@ -274,8 +274,8 @@ fn foo() {
274"#, 274"#,
275 r#" 275 r#"
276fn foo() { 276fn foo() {
277 let $0var_name = bar(1 + 1); 277 let $0bar = bar(1 + 1);
278 var_name 278 bar
279} 279}
280"#, 280"#,
281 ) 281 )
@@ -401,8 +401,8 @@ fn main() {
401", 401",
402 " 402 "
403fn main() { 403fn main() {
404 let $0var_name = bar.foo(); 404 let $0foo = bar.foo();
405 let v = var_name; 405 let v = foo;
406} 406}
407", 407",
408 ); 408 );
@@ -557,6 +557,202 @@ fn main() {
557 } 557 }
558 558
559 #[test] 559 #[test]
560 fn extract_var_name_from_type() {
561 check_assist(
562 extract_variable,
563 r#"
564struct Test(i32);
565
566fn foo() -> Test {
567 $0{ Test(10) }$0
568}
569"#,
570 r#"
571struct Test(i32);
572
573fn foo() -> Test {
574 let $0test = { Test(10) };
575 test
576}
577"#,
578 )
579 }
580
581 #[test]
582 fn extract_var_name_from_parameter() {
583 check_assist(
584 extract_variable,
585 r#"
586fn bar(test: u32, size: u32)
587
588fn foo() {
589 bar(1, $01+1$0);
590}
591"#,
592 r#"
593fn bar(test: u32, size: u32)
594
595fn foo() {
596 let $0size = 1+1;
597 bar(1, size);
598}
599"#,
600 )
601 }
602
603 #[test]
604 fn extract_var_parameter_name_has_precedence_over_type() {
605 check_assist(
606 extract_variable,
607 r#"
608struct TextSize(u32);
609fn bar(test: u32, size: TextSize)
610
611fn foo() {
612 bar(1, $0{ TextSize(1+1) }$0);
613}
614"#,
615 r#"
616struct TextSize(u32);
617fn bar(test: u32, size: TextSize)
618
619fn foo() {
620 let $0size = { TextSize(1+1) };
621 bar(1, size);
622}
623"#,
624 )
625 }
626
627 #[test]
628 fn extract_var_name_from_function() {
629 check_assist(
630 extract_variable,
631 r#"
632fn is_required(test: u32, size: u32) -> bool
633
634fn foo() -> bool {
635 $0is_required(1, 2)$0
636}
637"#,
638 r#"
639fn is_required(test: u32, size: u32) -> bool
640
641fn foo() -> bool {
642 let $0is_required = is_required(1, 2);
643 is_required
644}
645"#,
646 )
647 }
648
649 #[test]
650 fn extract_var_name_from_method() {
651 check_assist(
652 extract_variable,
653 r#"
654struct S;
655impl S {
656 fn bar(&self, n: u32) -> u32 { n }
657}
658
659fn foo() -> u32 {
660 $0S.bar(1)$0
661}
662"#,
663 r#"
664struct S;
665impl S {
666 fn bar(&self, n: u32) -> u32 { n }
667}
668
669fn foo() -> u32 {
670 let $0bar = S.bar(1);
671 bar
672}
673"#,
674 )
675 }
676
677 #[test]
678 fn extract_var_name_from_method_param() {
679 check_assist(
680 extract_variable,
681 r#"
682struct S;
683impl S {
684 fn bar(&self, n: u32, size: u32) { n }
685}
686
687fn foo() {
688 S.bar($01 + 1$0, 2)
689}
690"#,
691 r#"
692struct S;
693impl S {
694 fn bar(&self, n: u32, size: u32) { n }
695}
696
697fn foo() {
698 let $0n = 1 + 1;
699 S.bar(n, 2)
700}
701"#,
702 )
703 }
704
705 #[test]
706 fn extract_var_name_from_ufcs_method_param() {
707 check_assist(
708 extract_variable,
709 r#"
710struct S;
711impl S {
712 fn bar(&self, n: u32, size: u32) { n }
713}
714
715fn foo() {
716 S::bar(&S, $01 + 1$0, 2)
717}
718"#,
719 r#"
720struct S;
721impl S {
722 fn bar(&self, n: u32, size: u32) { n }
723}
724
725fn foo() {
726 let $0n = 1 + 1;
727 S::bar(&S, n, 2)
728}
729"#,
730 )
731 }
732
733 #[test]
734 fn extract_var_parameter_name_has_precedence_over_function() {
735 check_assist(
736 extract_variable,
737 r#"
738fn bar(test: u32, size: u32)
739
740fn foo() {
741 bar(1, $0symbol_size(1, 2)$0);
742}
743"#,
744 r#"
745fn bar(test: u32, size: u32)
746
747fn foo() {
748 let $0size = symbol_size(1, 2);
749 bar(1, size);
750}
751"#,
752 )
753 }
754
755 #[test]
560 fn test_extract_var_for_return_not_applicable() { 756 fn test_extract_var_for_return_not_applicable() {
561 check_assist_not_applicable(extract_variable, "fn foo() { $0return$0; } "); 757 check_assist_not_applicable(extract_variable, "fn foo() { $0return$0; } ");
562 } 758 }
diff --git a/crates/ide_assists/src/lib.rs b/crates/ide_assists/src/lib.rs
index 53542d433..9c8148462 100644
--- a/crates/ide_assists/src/lib.rs
+++ b/crates/ide_assists/src/lib.rs
@@ -115,6 +115,7 @@ mod handlers {
115 mod auto_import; 115 mod auto_import;
116 mod change_visibility; 116 mod change_visibility;
117 mod convert_integer_literal; 117 mod convert_integer_literal;
118 mod convert_comment_block;
118 mod early_return; 119 mod early_return;
119 mod expand_glob_import; 120 mod expand_glob_import;
120 mod extract_function; 121 mod extract_function;
@@ -178,6 +179,7 @@ mod handlers {
178 auto_import::auto_import, 179 auto_import::auto_import,
179 change_visibility::change_visibility, 180 change_visibility::change_visibility,
180 convert_integer_literal::convert_integer_literal, 181 convert_integer_literal::convert_integer_literal,
182 convert_comment_block::convert_comment_block,
181 early_return::convert_to_guarded_return, 183 early_return::convert_to_guarded_return,
182 expand_glob_import::expand_glob_import, 184 expand_glob_import::expand_glob_import,
183 extract_struct_from_enum_variant::extract_struct_from_enum_variant, 185 extract_struct_from_enum_variant::extract_struct_from_enum_variant,
diff --git a/crates/ide_assists/src/utils.rs b/crates/ide_assists/src/utils.rs
index 880ab6fe3..62f959082 100644
--- a/crates/ide_assists/src/utils.rs
+++ b/crates/ide_assists/src/utils.rs
@@ -1,5 +1,7 @@
1//! Assorted functions shared by several assists. 1//! Assorted functions shared by several assists.
2 2
3pub(crate) mod suggest_name;
4
3use std::ops; 5use std::ops;
4 6
5use ast::TypeBoundsOwner; 7use ast::TypeBoundsOwner;
diff --git a/crates/ide_assists/src/utils/suggest_name.rs b/crates/ide_assists/src/utils/suggest_name.rs
new file mode 100644
index 000000000..533624c1f
--- /dev/null
+++ b/crates/ide_assists/src/utils/suggest_name.rs
@@ -0,0 +1,729 @@
1//! This module contains functions to suggest names for expressions, functions and other items
2
3use hir::Semantics;
4use ide_db::RootDatabase;
5use itertools::Itertools;
6use stdx::to_lower_snake_case;
7use syntax::{
8 ast::{self, NameOwner},
9 match_ast, AstNode,
10};
11
12/// Trait names that will be ignored when in `impl Trait` and `dyn Trait`
13const USELESS_TRAITS: &[&str] = &["Send", "Sync", "Copy", "Clone", "Eq", "PartialEq"];
14
15/// Identifier names that won't be suggested, ever
16///
17/// **NOTE**: they must all be in lower snake case
18const USELESS_NAMES: &[&str] =
19 &["new", "default", "option", "some", "none", "ok", "err", "str", "string"];
20
21/// Generic types replaced by their first argument
22///
23/// # Examples
24/// `Option<Name>` -> `Name`
25/// `Result<User, Error>` -> `User`
26const WRAPPER_TYPES: &[&str] = &["Box", "Option", "Result"];
27
28/// Prefixes to strip from method names
29///
30/// # Examples
31/// `vec.as_slice()` -> `slice`
32/// `args.into_config()` -> `config`
33/// `bytes.to_vec()` -> `vec`
34const USELESS_METHOD_PREFIXES: &[&str] = &["into_", "as_", "to_"];
35
36/// Useless methods that are stripped from the expression
37///
38/// # Examples
39/// `var.name().to_string()` -> `var.name()`
40const USELESS_METHODS: &[&str] = &[
41 "to_string",
42 "as_str",
43 "to_owned",
44 "as_ref",
45 "clone",
46 "cloned",
47 "expect",
48 "expect_none",
49 "unwrap",
50 "unwrap_none",
51 "unwrap_or",
52 "unwrap_or_default",
53 "unwrap_or_else",
54 "unwrap_unchecked",
55 "iter",
56 "into_iter",
57 "iter_mut",
58];
59
60/// Suggest a variable name for the given expression
61///
62/// **NOTE**: it is the caller's responsibility to guarantee uniqueness of the name.
63/// I.e. it doesn't look for names in scope.
64///
65/// # Current implementation
66///
67/// In the current implementation, the function tries to get the name from
68/// the following sources:
69///
70/// * if expr is an argument to a function/method, use the parameter name
71/// * if expr is a function/method call, use function name
72/// * expression type name if it exists (E.g. `()`, `fn() -> ()` or `!` do not have names)
73/// * fallback: `var_name`
74///
75/// It also applies heuristics to filter out less informative names
76///
77/// Currently it sticks to the first name found.
78pub(crate) fn variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String {
79 // `from_param` does not benefit from stripping:
80 // it needs the largest context possible,
81 // so we check it first
82 if let Some(name) = from_param(expr, sema) {
83 return name;
84 }
85
86 let mut next_expr = Some(expr.clone());
87 while let Some(expr) = next_expr {
88 let name = from_call(&expr).or_else(|| from_type(&expr, sema));
89 if let Some(name) = name {
90 return name;
91 }
92
93 match expr {
94 ast::Expr::RefExpr(inner) => next_expr = inner.expr(),
95 ast::Expr::BoxExpr(inner) => next_expr = inner.expr(),
96 ast::Expr::AwaitExpr(inner) => next_expr = inner.expr(),
97 // ast::Expr::BlockExpr(block) => expr = block.tail_expr(),
98 ast::Expr::CastExpr(inner) => next_expr = inner.expr(),
99 ast::Expr::MethodCallExpr(method) if is_useless_method(&method) => {
100 next_expr = method.receiver();
101 }
102 ast::Expr::ParenExpr(inner) => next_expr = inner.expr(),
103 ast::Expr::TryExpr(inner) => next_expr = inner.expr(),
104 ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(ast::PrefixOp::Deref) => {
105 next_expr = prefix.expr()
106 }
107 _ => break,
108 }
109 }
110
111 "var_name".to_string()
112}
113
114fn normalize(name: &str) -> Option<String> {
115 let name = to_lower_snake_case(name);
116
117 if USELESS_NAMES.contains(&name.as_str()) {
118 return None;
119 }
120
121 if !is_valid_name(&name) {
122 return None;
123 }
124
125 Some(name)
126}
127
128fn is_valid_name(name: &str) -> bool {
129 match syntax::lex_single_syntax_kind(name) {
130 Some((syntax::SyntaxKind::IDENT, _error)) => true,
131 _ => false,
132 }
133}
134
135fn is_useless_method(method: &ast::MethodCallExpr) -> bool {
136 let ident = method.name_ref().and_then(|it| it.ident_token());
137
138 if let Some(ident) = ident {
139 USELESS_METHODS.contains(&ident.text())
140 } else {
141 false
142 }
143}
144
145fn from_call(expr: &ast::Expr) -> Option<String> {
146 from_func_call(expr).or_else(|| from_method_call(expr))
147}
148
149fn from_func_call(expr: &ast::Expr) -> Option<String> {
150 let call = match expr {
151 ast::Expr::CallExpr(call) => call,
152 _ => return None,
153 };
154 let func = match call.expr()? {
155 ast::Expr::PathExpr(path) => path,
156 _ => return None,
157 };
158 let ident = func.path()?.segment()?.name_ref()?.ident_token()?;
159 normalize(ident.text())
160}
161
162fn from_method_call(expr: &ast::Expr) -> Option<String> {
163 let method = match expr {
164 ast::Expr::MethodCallExpr(call) => call,
165 _ => return None,
166 };
167 let ident = method.name_ref()?.ident_token()?;
168 let mut name = ident.text();
169
170 if USELESS_METHODS.contains(&name) {
171 return None;
172 }
173
174 for prefix in USELESS_METHOD_PREFIXES {
175 if let Some(suffix) = name.strip_prefix(prefix) {
176 name = suffix;
177 break;
178 }
179 }
180
181 normalize(&name)
182}
183
184fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
185 let arg_list = expr.syntax().parent().and_then(ast::ArgList::cast)?;
186 let args_parent = arg_list.syntax().parent()?;
187 let func = match_ast! {
188 match args_parent {
189 ast::CallExpr(call) => {
190 let func = call.expr()?;
191 let func_ty = sema.type_of_expr(&func)?;
192 func_ty.as_callable(sema.db)?
193 },
194 ast::MethodCallExpr(method) => sema.resolve_method_call_as_callable(&method)?,
195 _ => return None,
196 }
197 };
198
199 let (idx, _) = arg_list.args().find_position(|it| it == expr).unwrap();
200 let (pat, _) = func.params(sema.db).into_iter().nth(idx)?;
201 let pat = match pat? {
202 either::Either::Right(pat) => pat,
203 _ => return None,
204 };
205 let name = var_name_from_pat(&pat)?;
206 normalize(&name.to_string())
207}
208
209fn var_name_from_pat(pat: &ast::Pat) -> Option<ast::Name> {
210 match pat {
211 ast::Pat::IdentPat(var) => var.name(),
212 ast::Pat::RefPat(ref_pat) => var_name_from_pat(&ref_pat.pat()?),
213 ast::Pat::BoxPat(box_pat) => var_name_from_pat(&box_pat.pat()?),
214 _ => None,
215 }
216}
217
218fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
219 let ty = sema.type_of_expr(expr)?;
220 let ty = ty.remove_ref().unwrap_or(ty);
221
222 name_of_type(&ty, sema.db)
223}
224
225fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option<String> {
226 let name = if let Some(adt) = ty.as_adt() {
227 let name = adt.name(db).to_string();
228
229 if WRAPPER_TYPES.contains(&name.as_str()) {
230 let inner_ty = ty.type_parameters().next()?;
231 return name_of_type(&inner_ty, db);
232 }
233
234 name
235 } else if let Some(trait_) = ty.as_dyn_trait() {
236 trait_name(&trait_, db)?
237 } else if let Some(traits) = ty.as_impl_traits(db) {
238 let mut iter = traits.into_iter().filter_map(|t| trait_name(&t, db));
239 let name = iter.next()?;
240 if iter.next().is_some() {
241 return None;
242 }
243 name
244 } else {
245 return None;
246 };
247 normalize(&name)
248}
249
250fn trait_name(trait_: &hir::Trait, db: &RootDatabase) -> Option<String> {
251 let name = trait_.name(db).to_string();
252 if USELESS_TRAITS.contains(&name.as_str()) {
253 return None;
254 }
255 Some(name)
256}
257
258#[cfg(test)]
259mod tests {
260 use ide_db::base_db::{fixture::WithFixture, FileRange};
261
262 use super::*;
263
264 #[track_caller]
265 fn check(ra_fixture: &str, expected: &str) {
266 let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
267 let frange = FileRange { file_id, range: range_or_offset.into() };
268
269 let sema = Semantics::new(&db);
270 let source_file = sema.parse(frange.file_id);
271 let element = source_file.syntax().covering_element(frange.range);
272 let expr =
273 element.ancestors().find_map(ast::Expr::cast).expect("selection is not an expression");
274 assert_eq!(
275 expr.syntax().text_range(),
276 frange.range,
277 "selection is not an expression(yet contained in one)"
278 );
279 let name = variable(&expr, &sema);
280 assert_eq!(&name, expected);
281 }
282
283 #[test]
284 fn no_args() {
285 check(r#"fn foo() { $0bar()$0 }"#, "bar");
286 check(r#"fn foo() { $0bar.frobnicate()$0 }"#, "frobnicate");
287 }
288
289 #[test]
290 fn single_arg() {
291 check(r#"fn foo() { $0bar(1)$0 }"#, "bar");
292 }
293
294 #[test]
295 fn many_args() {
296 check(r#"fn foo() { $0bar(1, 2, 3)$0 }"#, "bar");
297 }
298
299 #[test]
300 fn path() {
301 check(r#"fn foo() { $0i32::bar(1, 2, 3)$0 }"#, "bar");
302 }
303
304 #[test]
305 fn generic_params() {
306 check(r#"fn foo() { $0bar::<i32>(1, 2, 3)$0 }"#, "bar");
307 check(r#"fn foo() { $0bar.frobnicate::<i32, u32>()$0 }"#, "frobnicate");
308 }
309
310 #[test]
311 fn to_name() {
312 check(
313 r#"
314struct Args;
315struct Config;
316impl Args {
317 fn to_config(&self) -> Config {}
318}
319fn foo() {
320 $0Args.to_config()$0;
321}
322"#,
323 "config",
324 );
325 }
326
327 #[test]
328 fn plain_func() {
329 check(
330 r#"
331fn bar(n: i32, m: u32);
332fn foo() { bar($01$0, 2) }
333"#,
334 "n",
335 );
336 }
337
338 #[test]
339 fn mut_param() {
340 check(
341 r#"
342fn bar(mut n: i32, m: u32);
343fn foo() { bar($01$0, 2) }
344"#,
345 "n",
346 );
347 }
348
349 #[test]
350 fn func_does_not_exist() {
351 check(r#"fn foo() { bar($01$0, 2) }"#, "var_name");
352 }
353
354 #[test]
355 fn unnamed_param() {
356 check(
357 r#"
358fn bar(_: i32, m: u32);
359fn foo() { bar($01$0, 2) }
360"#,
361 "var_name",
362 );
363 }
364
365 #[test]
366 fn tuple_pat() {
367 check(
368 r#"
369fn bar((n, k): (i32, i32), m: u32);
370fn foo() {
371 bar($0(1, 2)$0, 3)
372}
373"#,
374 "var_name",
375 );
376 }
377
378 #[test]
379 fn ref_pat() {
380 check(
381 r#"
382fn bar(&n: &i32, m: u32);
383fn foo() { bar($0&1$0, 3) }
384"#,
385 "n",
386 );
387 }
388
389 #[test]
390 fn box_pat() {
391 check(
392 r#"
393fn bar(box n: &i32, m: u32);
394fn foo() { bar($01$0, 3) }
395"#,
396 "n",
397 );
398 }
399
400 #[test]
401 fn param_out_of_index() {
402 check(
403 r#"
404fn bar(n: i32, m: u32);
405fn foo() { bar(1, 2, $03$0) }
406"#,
407 "var_name",
408 );
409 }
410
411 #[test]
412 fn generic_param_resolved() {
413 check(
414 r#"
415fn bar<T>(n: T, m: u32);
416fn foo() { bar($01$0, 2) }
417"#,
418 "n",
419 );
420 }
421
422 #[test]
423 fn generic_param_unresolved() {
424 check(
425 r#"
426fn bar<T>(n: T, m: u32);
427fn foo<T>(x: T) { bar($0x$0, 2) }
428"#,
429 "n",
430 );
431 }
432
433 #[test]
434 fn method() {
435 check(
436 r#"
437struct S;
438impl S { fn bar(&self, n: i32, m: u32); }
439fn foo() { S.bar($01$0, 2) }
440"#,
441 "n",
442 );
443 }
444
445 #[test]
446 fn method_ufcs() {
447 check(
448 r#"
449struct S;
450impl S { fn bar(&self, n: i32, m: u32); }
451fn foo() { S::bar(&S, $01$0, 2) }
452"#,
453 "n",
454 );
455 }
456
457 #[test]
458 fn method_self() {
459 check(
460 r#"
461struct S;
462impl S { fn bar(&self, n: i32, m: u32); }
463fn foo() { S::bar($0&S$0, 1, 2) }
464"#,
465 "s",
466 );
467 }
468
469 #[test]
470 fn method_self_named() {
471 check(
472 r#"
473struct S;
474impl S { fn bar(strukt: &Self, n: i32, m: u32); }
475fn foo() { S::bar($0&S$0, 1, 2) }
476"#,
477 "strukt",
478 );
479 }
480
481 #[test]
482 fn i32() {
483 check(r#"fn foo() { let _: i32 = $01$0; }"#, "var_name");
484 }
485
486 #[test]
487 fn u64() {
488 check(r#"fn foo() { let _: u64 = $01$0; }"#, "var_name");
489 }
490
491 #[test]
492 fn bool() {
493 check(r#"fn foo() { let _: bool = $0true$0; }"#, "var_name");
494 }
495
496 #[test]
497 fn struct_unit() {
498 check(
499 r#"
500struct Seed;
501fn foo() { let _ = $0Seed$0; }
502"#,
503 "seed",
504 );
505 }
506
507 #[test]
508 fn struct_unit_to_snake() {
509 check(
510 r#"
511struct SeedState;
512fn foo() { let _ = $0SeedState$0; }
513"#,
514 "seed_state",
515 );
516 }
517
518 #[test]
519 fn struct_single_arg() {
520 check(
521 r#"
522struct Seed(u32);
523fn foo() { let _ = $0Seed(0)$0; }
524"#,
525 "seed",
526 );
527 }
528
529 #[test]
530 fn struct_with_fields() {
531 check(
532 r#"
533struct Seed { value: u32 }
534fn foo() { let _ = $0Seed { value: 0 }$0; }
535"#,
536 "seed",
537 );
538 }
539
540 #[test]
541 fn enum_() {
542 check(
543 r#"
544enum Kind { A, B }
545fn foo() { let _ = $0Kind::A$0; }
546"#,
547 "kind",
548 );
549 }
550
551 #[test]
552 fn enum_generic_resolved() {
553 check(
554 r#"
555enum Kind<T> { A { x: T }, B }
556fn foo() { let _ = $0Kind::A { x:1 }$0; }
557"#,
558 "kind",
559 );
560 }
561
562 #[test]
563 fn enum_generic_unresolved() {
564 check(
565 r#"
566enum Kind<T> { A { x: T }, B }
567fn foo<T>(x: T) { let _ = $0Kind::A { x }$0; }
568"#,
569 "kind",
570 );
571 }
572
573 #[test]
574 fn dyn_trait() {
575 check(
576 r#"
577trait DynHandler {}
578fn bar() -> dyn DynHandler {}
579fn foo() { $0(bar())$0; }
580"#,
581 "dyn_handler",
582 );
583 }
584
585 #[test]
586 fn impl_trait() {
587 check(
588 r#"
589trait StaticHandler {}
590fn bar() -> impl StaticHandler {}
591fn foo() { $0(bar())$0; }
592"#,
593 "static_handler",
594 );
595 }
596
597 #[test]
598 fn impl_trait_plus_clone() {
599 check(
600 r#"
601trait StaticHandler {}
602trait Clone {}
603fn bar() -> impl StaticHandler + Clone {}
604fn foo() { $0(bar())$0; }
605"#,
606 "static_handler",
607 );
608 }
609
610 #[test]
611 fn impl_trait_plus_lifetime() {
612 check(
613 r#"
614trait StaticHandler {}
615trait Clone {}
616fn bar<'a>(&'a i32) -> impl StaticHandler + 'a {}
617fn foo() { $0(bar(&1))$0; }
618"#,
619 "static_handler",
620 );
621 }
622
623 #[test]
624 fn impl_trait_plus_trait() {
625 check(
626 r#"
627trait Handler {}
628trait StaticHandler {}
629fn bar() -> impl StaticHandler + Handler {}
630fn foo() { $0(bar())$0; }
631"#,
632 "bar",
633 );
634 }
635
636 #[test]
637 fn ref_value() {
638 check(
639 r#"
640struct Seed;
641fn bar() -> &Seed {}
642fn foo() { $0(bar())$0; }
643"#,
644 "seed",
645 );
646 }
647
648 #[test]
649 fn box_value() {
650 check(
651 r#"
652struct Box<T>(*const T);
653struct Seed;
654fn bar() -> Box<Seed> {}
655fn foo() { $0(bar())$0; }
656"#,
657 "seed",
658 );
659 }
660
661 #[test]
662 fn box_generic() {
663 check(
664 r#"
665struct Box<T>(*const T);
666fn bar<T>() -> Box<T> {}
667fn foo<T>() { $0(bar::<T>())$0; }
668"#,
669 "bar",
670 );
671 }
672
673 #[test]
674 fn option_value() {
675 check(
676 r#"
677enum Option<T> { Some(T) }
678struct Seed;
679fn bar() -> Option<Seed> {}
680fn foo() { $0(bar())$0; }
681"#,
682 "seed",
683 );
684 }
685
686 #[test]
687 fn result_value() {
688 check(
689 r#"
690enum Result<T, E> { Ok(T), Err(E) }
691struct Seed;
692struct Error;
693fn bar() -> Result<Seed, Error> {}
694fn foo() { $0(bar())$0; }
695"#,
696 "seed",
697 );
698 }
699
700 #[test]
701 fn ref_call() {
702 check(
703 r#"
704fn foo() { $0&bar(1, 3)$0 }
705"#,
706 "bar",
707 );
708 }
709
710 #[test]
711 fn name_to_string() {
712 check(
713 r#"
714fn foo() { $0function.name().to_string()$0 }
715"#,
716 "name",
717 );
718 }
719
720 #[test]
721 fn nested_useless_method() {
722 check(
723 r#"
724fn foo() { $0function.name().as_ref().unwrap().to_string()$0 }
725"#,
726 "name",
727 );
728 }
729}
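The lookup order in `variable` (parameter name first, then the callee name, then the type name, then the `var_name` fallback, with `normalize` filtering out uninformative candidates) can be modelled on plain strings. A minimal sketch under that simplification; the `suggest` helper, the trimmed-down `normalize`, and the `BORING_NAMES` list are stand-ins, not part of the module above:

```rust
// Simplified model of the lookup order used by `variable`, on plain strings
// instead of syntax nodes.
const BORING_NAMES: &[&str] = &["new", "default", "ok", "err"];

// Stand-in for `to_lower_snake_case` plus identifier validation.
fn normalize(name: &str) -> Option<String> {
    let name = name.to_lowercase();
    if BORING_NAMES.contains(&name.as_str()) {
        None
    } else {
        Some(name)
    }
}

// Parameter name wins, then the callee name, then the type name, then the fallback.
fn suggest(param: Option<&str>, callee: Option<&str>, ty: Option<&str>) -> String {
    param
        .and_then(normalize)
        .or_else(|| callee.and_then(normalize))
        .or_else(|| ty.and_then(normalize))
        .unwrap_or_else(|| "var_name".to_string())
}

fn main() {
    assert_eq!(suggest(Some("size"), Some("symbol_size"), Some("u32")), "size");
    assert_eq!(suggest(None, Some("bar"), Some("Test")), "bar");
    assert_eq!(suggest(None, None, None), "var_name");
}
```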
diff --git a/crates/ide_completion/src/completions/attribute.rs b/crates/ide_completion/src/completions/attribute.rs
index 3a5bc4381..cb05e85fc 100644
--- a/crates/ide_completion/src/completions/attribute.rs
+++ b/crates/ide_completion/src/completions/attribute.rs
@@ -39,7 +39,8 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
39} 39}
40 40
41fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) { 41fn complete_attribute_start(acc: &mut Completions, ctx: &CompletionContext, attribute: &ast::Attr) {
42 for attr_completion in ATTRIBUTES { 42 let is_inner = attribute.kind() == ast::AttrKind::Inner;
43 for attr_completion in ATTRIBUTES.iter().filter(|compl| is_inner || !compl.prefer_inner) {
43 let mut item = CompletionItem::new( 44 let mut item = CompletionItem::new(
44 CompletionKind::Attribute, 45 CompletionKind::Attribute,
45 ctx.source_range(), 46 ctx.source_range(),
diff --git a/crates/ide_completion/src/completions/fn_param.rs b/crates/ide_completion/src/completions/fn_param.rs
index 38e33a93e..1bcc8727f 100644
--- a/crates/ide_completion/src/completions/fn_param.rs
+++ b/crates/ide_completion/src/completions/fn_param.rs
@@ -25,9 +25,12 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
25 return; 25 return;
26 } 26 }
27 func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| { 27 func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
28 let text = param.syntax().text().to_string(); 28 if let Some(pat) = param.pat() {
29 params.entry(text).or_insert(param); 29 let text = param.syntax().text().to_string();
30 }) 30 let lookup = pat.syntax().text().to_string();
31 params.entry(text).or_insert(lookup);
32 }
33 });
31 }; 34 };
32 35
33 for node in ctx.token.parent().ancestors() { 36 for node in ctx.token.parent().ancestors() {
@@ -50,18 +53,12 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
50 }; 53 };
51 } 54 }
52 55
53 params 56 params.into_iter().for_each(|(label, lookup)| {
54 .into_iter() 57 CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label)
55 .filter_map(|(label, param)| { 58 .kind(CompletionItemKind::Binding)
56 let lookup = param.pat()?.syntax().text().to_string(); 59 .lookup_by(lookup)
57 Some((label, lookup)) 60 .add_to(acc)
58 }) 61 });
59 .for_each(|(label, lookup)| {
60 CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label)
61 .kind(CompletionItemKind::Binding)
62 .lookup_by(lookup)
63 .add_to(acc)
64 });
65} 62}
66 63
67#[cfg(test)] 64#[cfg(test)]
diff --git a/crates/ide_completion/src/completions/keyword.rs b/crates/ide_completion/src/completions/keyword.rs
index eb81f9765..03c6dd454 100644
--- a/crates/ide_completion/src/completions/keyword.rs
+++ b/crates/ide_completion/src/completions/keyword.rs
@@ -1,5 +1,7 @@
1//! Completes keywords. 1//! Completes keywords.
2 2
3use std::iter;
4
3use syntax::SyntaxKind; 5use syntax::SyntaxKind;
4use test_utils::mark; 6use test_utils::mark;
5 7
@@ -19,10 +21,14 @@ pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
19 CompletionItem::new(CompletionKind::Keyword, source_range, "self") 21 CompletionItem::new(CompletionKind::Keyword, source_range, "self")
20 .kind(CompletionItemKind::Keyword) 22 .kind(CompletionItemKind::Keyword)
21 .add_to(acc); 23 .add_to(acc);
22 CompletionItem::new(CompletionKind::Keyword, source_range, "super::") 24 if iter::successors(ctx.path_qual.clone(), |p| p.qualifier())
23 .kind(CompletionItemKind::Keyword) 25 .all(|p| p.segment().and_then(|s| s.super_token()).is_some())
24 .insert_text("super::") 26 {
25 .add_to(acc); 27 CompletionItem::new(CompletionKind::Keyword, source_range, "super::")
28 .kind(CompletionItemKind::Keyword)
29 .insert_text("super::")
30 .add_to(acc);
31 }
26 } 32 }
27 33
28 // Suggest .await syntax for types that implement Future trait 34 // Suggest .await syntax for types that implement Future trait
@@ -85,6 +91,7 @@ pub(crate) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
85 if ctx.is_expr { 91 if ctx.is_expr {
86 add_keyword(ctx, acc, "match", "match $0 {}"); 92 add_keyword(ctx, acc, "match", "match $0 {}");
87 add_keyword(ctx, acc, "while", "while $0 {}"); 93 add_keyword(ctx, acc, "while", "while $0 {}");
94 add_keyword(ctx, acc, "while let", "while let $1 = $0 {}");
88 add_keyword(ctx, acc, "loop", "loop {$0}"); 95 add_keyword(ctx, acc, "loop", "loop {$0}");
89 add_keyword(ctx, acc, "if", "if $0 {}"); 96 add_keyword(ctx, acc, "if", "if $0 {}");
90 add_keyword(ctx, acc, "if let", "if let $1 = $0 {}"); 97 add_keyword(ctx, acc, "if let", "if let $1 = $0 {}");
@@ -204,9 +211,17 @@ mod tests {
204 "#]], 211 "#]],
205 ); 212 );
206 213
214 // FIXME: `self` shouldn't be shown here, nor in the check below
207 check( 215 check(
208 r"use a::$0", 216 r"use a::$0",
209 expect![[r#" 217 expect![[r#"
218 kw self
219 "#]],
220 );
221
222 check(
223 r"use super::$0",
224 expect![[r#"
210 kw self 225 kw self
211 kw super:: 226 kw super::
212 "#]], 227 "#]],
@@ -215,9 +230,8 @@ mod tests {
215 check( 230 check(
216 r"use a::{b, $0}", 231 r"use a::{b, $0}",
217 expect![[r#" 232 expect![[r#"
218 kw self 233 kw self
219 kw super:: 234 "#]],
220 "#]],
221 ); 235 );
222 } 236 }
223 237
@@ -256,6 +270,7 @@ mod tests {
256 kw trait 270 kw trait
257 kw match 271 kw match
258 kw while 272 kw while
273 kw while let
259 kw loop 274 kw loop
260 kw if 275 kw if
261 kw if let 276 kw if let
@@ -283,6 +298,7 @@ mod tests {
283 kw trait 298 kw trait
284 kw match 299 kw match
285 kw while 300 kw while
301 kw while let
286 kw loop 302 kw loop
287 kw if 303 kw if
288 kw if let 304 kw if let
@@ -310,6 +326,7 @@ mod tests {
310 kw trait 326 kw trait
311 kw match 327 kw match
312 kw while 328 kw while
329 kw while let
313 kw loop 330 kw loop
314 kw if 331 kw if
315 kw if let 332 kw if let
@@ -344,6 +361,7 @@ fn quux() -> i32 {
344 expect![[r#" 361 expect![[r#"
345 kw match 362 kw match
346 kw while 363 kw while
364 kw while let
347 kw loop 365 kw loop
348 kw if 366 kw if
349 kw if let 367 kw if let
@@ -393,6 +411,7 @@ fn quux() -> i32 {
393 kw trait 411 kw trait
394 kw match 412 kw match
395 kw while 413 kw while
414 kw while let
396 kw loop 415 kw loop
397 kw if 416 kw if
398 kw if let 417 kw if let
@@ -552,6 +571,7 @@ pub mod future {
552 expect![[r#" 571 expect![[r#"
553 kw match 572 kw match
554 kw while 573 kw while
574 kw while let
555 kw loop 575 kw loop
556 kw if 576 kw if
557 kw if let 577 kw if let
@@ -611,6 +631,7 @@ fn foo() {
611 expect![[r#" 631 expect![[r#"
612 kw match 632 kw match
613 kw while 633 kw while
634 kw while let
614 kw loop 635 kw loop
615 kw if 636 kw if
616 kw if let 637 kw if let
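The new gate above only offers `super::` when every qualifier segment already written is itself `super`, so `use super::$0` keeps the completion while `use a::$0` drops it. A minimal sketch of that check with `std::iter::successors` over a toy path type (the `Path` struct and its `qualifier`/`segment` fields are stand-ins for the `ast::Path` API):

```rust
use std::iter;

// Toy stand-in for `ast::Path`: a chain of segments, innermost last.
#[derive(Clone)]
struct Path {
    qualifier: Option<Box<Path>>,
    segment: &'static str,
}

fn all_qualifiers_are_super(path_qual: Option<Path>) -> bool {
    // Walk qualifier -> qualifier -> ... and require every segment to be `super`.
    iter::successors(path_qual, |p| p.qualifier.as_deref().cloned())
        .all(|p| p.segment == "super")
}

fn main() {
    let sup = Path { qualifier: None, segment: "super" };
    let sup_sup = Path { qualifier: Some(Box::new(sup.clone())), segment: "super" };
    let a = Path { qualifier: None, segment: "a" };

    assert!(all_qualifiers_are_super(None));          // `use $0`
    assert!(all_qualifiers_are_super(Some(sup_sup))); // `use super::super::$0`
    assert!(!all_qualifiers_are_super(Some(a)));      // `use a::$0`
}
```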
diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs
index 2afa6979e..72fb757b1 100644
--- a/crates/ide_completion/src/completions/qualified_path.rs
+++ b/crates/ide_completion/src/completions/qualified_path.rs
@@ -81,9 +81,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
81 return None; 81 return None;
82 } 82 }
83 match item { 83 match item {
84 hir::AssocItem::Function(func) => { 84 hir::AssocItem::Function(func) => acc.add_function(ctx, func, None),
85 acc.add_function(ctx, func, None);
86 }
87 hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), 85 hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
88 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), 86 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
89 } 87 }
@@ -110,9 +108,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
110 continue; 108 continue;
111 } 109 }
112 match item { 110 match item {
113 hir::AssocItem::Function(func) => { 111 hir::AssocItem::Function(func) => acc.add_function(ctx, func, None),
114 acc.add_function(ctx, func, None);
115 }
116 hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), 112 hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
117 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), 113 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
118 } 114 }
@@ -143,9 +139,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
143 // them. 139 // them.
144 if seen.insert(item) { 140 if seen.insert(item) {
145 match item { 141 match item {
146 hir::AssocItem::Function(func) => { 142 hir::AssocItem::Function(func) => acc.add_function(ctx, func, None),
147 acc.add_function(ctx, func, None);
148 }
149 hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), 143 hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
150 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), 144 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
151 } 145 }
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs
index f9de8ce0e..3ff77400b 100644
--- a/crates/ide_db/src/helpers.rs
+++ b/crates/ide_db/src/helpers.rs
@@ -41,6 +41,10 @@ pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Option<Cr
41impl FamousDefs<'_, '_> { 41impl FamousDefs<'_, '_> {
42 pub const FIXTURE: &'static str = include_str!("helpers/famous_defs_fixture.rs"); 42 pub const FIXTURE: &'static str = include_str!("helpers/famous_defs_fixture.rs");
43 43
44 pub fn std(&self) -> Option<Crate> {
45 self.find_crate("std")
46 }
47
44 pub fn core(&self) -> Option<Crate> { 48 pub fn core(&self) -> Option<Crate> {
45 self.find_crate("core") 49 self.find_crate("core")
46 } 50 }
diff --git a/crates/ide_db/src/helpers/famous_defs_fixture.rs b/crates/ide_db/src/helpers/famous_defs_fixture.rs
index bb4e9666b..d3464ae17 100644
--- a/crates/ide_db/src/helpers/famous_defs_fixture.rs
+++ b/crates/ide_db/src/helpers/famous_defs_fixture.rs
@@ -129,3 +129,11 @@ pub mod prelude {
129} 129}
130#[prelude_import] 130#[prelude_import]
131pub use prelude::*; 131pub use prelude::*;
132//- /libstd.rs crate:std deps:core
133//! Signatures of traits, types and functions from the std lib for use in tests.
134
135/// Docs for return_keyword
136mod return_keyword {}
137
138/// Docs for prim_str
139mod prim_str {}
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index bbee2e32c..bb2656a80 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -18,9 +18,7 @@ syntax = { path = "../syntax", version = "0.0.0" }
18parser = { path = "../parser", version = "0.0.0" } 18parser = { path = "../parser", version = "0.0.0" }
19tt = { path = "../tt", version = "0.0.0" } 19tt = { path = "../tt", version = "0.0.0" }
20test_utils = { path = "../test_utils", version = "0.0.0" } 20test_utils = { path = "../test_utils", version = "0.0.0" }
21stdx = { path = "../stdx", version = "0.0.0" }
21 22
22# FIXME: Paper over a bug in cargo-worspaces which block publishing 23[dev-dependencies]
23# https://github.com/pksunkara/cargo-workspaces/issues/39 24profile = { path = "../profile" }
24# [dev-dependencies]
25profile = { path = "../profile", version = "0.0.0" }
26
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index 6d81be880..503ad1355 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -40,18 +40,12 @@ fn benchmark_expand_macro_rules() {
40 .into_iter() 40 .into_iter()
41 .map(|(id, tt)| { 41 .map(|(id, tt)| {
42 let res = rules[&id].expand(&tt); 42 let res = rules[&id].expand(&tt);
43 if res.err.is_some() { 43 assert!(res.err.is_none());
44 // FIXME:
45 // Currently `invocation_fixtures` will generate some correct invocations but
46 // cannot be expanded by mbe. We ignore errors here.
47 // See: https://github.com/rust-analyzer/rust-analyzer/issues/4777
48 eprintln!("err from {} {:?}", id, res.err);
49 }
50 res.value.token_trees.len() 44 res.value.token_trees.len()
51 }) 45 })
52 .sum() 46 .sum()
53 }; 47 };
54 assert_eq!(hash, 66995); 48 assert_eq!(hash, 69413);
55} 49}
56 50
57fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> { 51fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> {
@@ -77,7 +71,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
77 .collect() 71 .collect()
78} 72}
79 73
80// Generate random invocation fixtures from rules 74/// Generate random invocation fixtures from rules
81fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> { 75fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> {
82 let mut seed = 123456789; 76 let mut seed = 123456789;
83 let mut res = Vec::new(); 77 let mut res = Vec::new();
@@ -86,11 +80,31 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
86 for rule in &it.rules { 80 for rule in &it.rules {
87 // Generate twice 81 // Generate twice
88 for _ in 0..2 { 82 for _ in 0..2 {
89 let mut subtree = tt::Subtree::default(); 83 // The inputs are generated by filling the `Op`s randomly.
90 for op in rule.lhs.iter() { 84 // However, some generated cases are ambiguous and cannot be expanded, for example:
91 collect_from_op(op, &mut subtree, &mut seed); 85 // ```rust
86 // macro_rules! m {
87 // ($($t:ident),* as $ty:ident) => {}
88 // }
89 // m!(as u32); // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option.
90 // ```
91 //
92 // So we just skip any error cases and try again
93 let mut try_cnt = 0;
94 loop {
95 let mut subtree = tt::Subtree::default();
96 for op in rule.lhs.iter() {
97 collect_from_op(op, &mut subtree, &mut seed);
98 }
99 if it.expand(&subtree).err.is_none() {
100 res.push((name.clone(), subtree));
101 break;
102 }
103 try_cnt += 1;
104 if try_cnt > 100 {
105 panic!("invocaton fixture {} cannot be generated.\n", name);
106 }
92 } 107 }
93 res.push((name.clone(), subtree));
94 } 108 }
95 } 109 }
96 } 110 }
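The fixture generation above now follows a bounded retry strategy: keep producing random candidates, accept only those the expander can handle, and panic after a fixed number of failures so a bad rule cannot hang the benchmark. A minimal sketch of that pattern; `make` and `is_valid` are stand-ins for `collect_from_op` and `expand(..).err.is_none()`:

```rust
/// Minimal sketch of the bounded retry used by `invocation_fixtures`.
fn generate_valid<T>(mut make: impl FnMut() -> T, is_valid: impl Fn(&T) -> bool) -> T {
    let mut try_cnt = 0;
    loop {
        let candidate = make();
        if is_valid(&candidate) {
            return candidate;
        }
        try_cnt += 1;
        if try_cnt > 100 {
            panic!("no valid fixture could be generated");
        }
    }
}

fn main() {
    // Toy use: accept only even numbers from a counter-based "random" source.
    let mut n = 0u32;
    assert_eq!(generate_valid(|| { n += 1; n }, |x| x % 2 == 0), 2);
}
```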
diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs
index e7e14b3cc..2efff8f52 100644
--- a/crates/mbe/src/expander.rs
+++ b/crates/mbe/src/expander.rs
@@ -5,7 +5,7 @@
5mod matcher; 5mod matcher;
6mod transcriber; 6mod transcriber;
7 7
8use rustc_hash::FxHashMap; 8use smallvec::SmallVec;
9use syntax::SmolStr; 9use syntax::SmolStr;
10 10
11use crate::{ExpandError, ExpandResult}; 11use crate::{ExpandError, ExpandResult};
@@ -28,10 +28,10 @@ pub(crate) fn expand_rules(
28 return ExpandResult::ok(value); 28 return ExpandResult::ok(value);
29 } 29 }
30 } 30 }
31 // Use the rule if we matched more tokens, or had fewer errors 31 // Use the rule if we matched more tokens, or bound more variables
32 if let Some((prev_match, _)) = &match_ { 32 if let Some((prev_match, _)) = &match_ {
33 if (new_match.unmatched_tts, new_match.err_count) 33 if (new_match.unmatched_tts, -(new_match.bound_count as i32))
34 < (prev_match.unmatched_tts, prev_match.err_count) 34 < (prev_match.unmatched_tts, -(prev_match.bound_count as i32))
35 { 35 {
36 match_ = Some((new_match, rule)); 36 match_ = Some((new_match, rule));
37 } 37 }
@@ -94,19 +94,19 @@ pub(crate) fn expand_rules(
94/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to 94/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
95/// `tt::TokenTree`, where the index to select a particular `TokenTree` among 95/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
96/// many is not a plain `usize`, but an `&[usize]`. 96/// many is not a plain `usize`, but an `&[usize]`.
97#[derive(Debug, Default)] 97#[derive(Debug, Default, Clone, PartialEq, Eq)]
98struct Bindings { 98struct Bindings {
99 inner: FxHashMap<SmolStr, Binding>, 99 inner: SmallVec<[(SmolStr, Binding); 4]>,
100} 100}
101 101
102#[derive(Debug)] 102#[derive(Debug, Clone, PartialEq, Eq)]
103enum Binding { 103enum Binding {
104 Fragment(Fragment), 104 Fragment(Fragment),
105 Nested(Vec<Binding>), 105 Nested(Vec<Binding>),
106 Empty, 106 Empty,
107} 107}
108 108
109#[derive(Debug, Clone)] 109#[derive(Debug, Clone, PartialEq, Eq)]
110enum Fragment { 110enum Fragment {
111 /// token fragments are just copy-pasted into the output 111 /// token fragments are just copy-pasted into the output
112 Tokens(tt::TokenTree), 112 Tokens(tt::TokenTree),
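Rule selection in `expand_rules` now prefers the candidate with fewer unmatched token trees and, on a tie, the one that bound more variables; negating the bound count lets a single lexicographic tuple comparison express "smaller is better" for both fields. A minimal sketch of that ordering (`MatchScore` and `is_better` are stand-ins, not the real `Match` type):

```rust
#[derive(Debug, Clone, Copy)]
struct MatchScore {
    unmatched_tts: usize,
    bound_count: usize,
}

/// Returns true if `new` should replace `prev` as the best match so far.
fn is_better(new: MatchScore, prev: MatchScore) -> bool {
    // Fewer unmatched token trees wins; ties go to the match that bound more variables.
    (new.unmatched_tts, -(new.bound_count as i64))
        < (prev.unmatched_tts, -(prev.bound_count as i64))
}

fn main() {
    let a = MatchScore { unmatched_tts: 0, bound_count: 2 };
    let b = MatchScore { unmatched_tts: 0, bound_count: 3 };
    let c = MatchScore { unmatched_tts: 1, bound_count: 5 };

    assert!(is_better(b, a));  // same leftovers, more bindings
    assert!(!is_better(c, a)); // fewer leftovers dominates
}
```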
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index e3bd4c09a..9d3d28055 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -1,14 +1,74 @@
1//! FIXME: write short doc here 1//! An NFA-based parser, ported from rustc's mbe parsing code
2//!
3//! See https://github.com/rust-lang/rust/blob/70b18bc2cbac4712020019f5bf57c00905373205/compiler/rustc_expand/src/mbe/macro_parser.rs
4//! Here is a quick intro to how the parser works, copied from rustc:
5//!
6//! A 'position' is a dot in the middle of a matcher, usually represented as a
7//! dot. For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
8//!
9//! The parser walks through the input a character at a time, maintaining a list
10//! of threads consistent with the current position in the input string: `cur_items`.
11//!
12//! As it processes them, it fills up `eof_items` with threads that would be valid if
13//! the macro invocation is now over, `bb_items` with threads that are waiting on
14//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
15//! on a particular token. Most of the logic concerns moving the · through the
16//! repetitions indicated by Kleene stars. The rules for moving the · without
17//! consuming any input are called epsilon transitions. It only advances or calls
18//! out to the real Rust parser when no `cur_items` threads remain.
19//!
20//! Example:
21//!
22//! ```text, ignore
23//! Start parsing a a a a b against [· a $( a )* a b].
24//!
25//! Remaining input: a a a a b
26//! next: [· a $( a )* a b]
27//!
28//! - - - Advance over an a. - - -
29//!
30//! Remaining input: a a a b
31//! cur: [a · $( a )* a b]
32//! Descend/Skip (first item).
33//! next: [a $( · a )* a b] [a $( a )* · a b].
34//!
35//! - - - Advance over an a. - - -
36//!
37//! Remaining input: a a b
38//! cur: [a $( a · )* a b] [a $( a )* a · b]
39//! Follow epsilon transition: Finish/Repeat (first item)
40//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
41//!
42//! - - - Advance over an a. - - - (this looks exactly like the last step)
43//!
44//! Remaining input: a b
45//! cur: [a $( a · )* a b] [a $( a )* a · b]
46//! Follow epsilon transition: Finish/Repeat (first item)
47//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
48//!
49//! - - - Advance over an a. - - - (this looks exactly like the last step)
50//!
51//! Remaining input: b
52//! cur: [a $( a · )* a b] [a $( a )* a · b]
53//! Follow epsilon transition: Finish/Repeat (first item)
54//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
55//!
56//! - - - Advance over a b. - - -
57//!
58//! Remaining input: ''
59//! eof: [a $( a )* a b ·]
60//! ```
2 61
3use crate::{ 62use crate::{
4 expander::{Binding, Bindings, Fragment}, 63 expander::{Binding, Bindings, Fragment},
5 parser::{Op, RepeatKind, Separator}, 64 parser::{Op, OpDelimited, OpDelimitedIter, RepeatKind, Separator},
6 tt_iter::TtIter, 65 tt_iter::TtIter,
7 ExpandError, MetaTemplate, 66 ExpandError, MetaTemplate,
8}; 67};
9 68
10use super::ExpandResult; 69use super::ExpandResult;
11use parser::FragmentKind::*; 70use parser::FragmentKind::*;
71use smallvec::{smallvec, SmallVec};
12use syntax::SmolStr; 72use syntax::SmolStr;
13 73
14impl Bindings { 74impl Bindings {
@@ -16,19 +76,19 @@ impl Bindings {
16 // FIXME: Do we have a better way to represent an empty token ? 76 // FIXME: Do we have a better way to represent an empty token ?
17 // Insert an empty subtree for empty token 77 // Insert an empty subtree for empty token
18 let tt = tt::Subtree::default().into(); 78 let tt = tt::Subtree::default().into();
19 self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); 79 self.inner.push((name.clone(), Binding::Fragment(Fragment::Tokens(tt))));
20 } 80 }
21 81
22 fn push_empty(&mut self, name: &SmolStr) { 82 fn push_empty(&mut self, name: &SmolStr) {
23 self.inner.insert(name.clone(), Binding::Empty); 83 self.inner.push((name.clone(), Binding::Empty));
24 } 84 }
25 85
26 fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> { 86 fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> {
27 for (key, value) in nested.inner { 87 for (key, value) in nested.inner {
28 if !self.inner.contains_key(&key) { 88 if self.get_mut(&key).is_none() {
29 self.inner.insert(key.clone(), Binding::Nested(Vec::new())); 89 self.inner.push((key.clone(), Binding::Nested(Vec::new())));
30 } 90 }
31 match self.inner.get_mut(&key) { 91 match self.get_mut(&key) {
32 Some(Binding::Nested(it)) => { 92 Some(Binding::Nested(it)) => {
33 // insert empty nested bindings before this one 93 // insert empty nested bindings before this one
34 while it.len() < idx { 94 while it.len() < idx {
@@ -46,6 +106,14 @@ impl Bindings {
46 } 106 }
47 Ok(()) 107 Ok(())
48 } 108 }
109
110 fn get_mut(&mut self, name: &str) -> Option<&mut Binding> {
111 self.inner.iter_mut().find_map(|(n, b)| if n == name { Some(b) } else { None })
112 }
113
114 fn bindings(&self) -> impl Iterator<Item = &Binding> {
115 self.inner.iter().map(|(_, b)| b)
116 }
49} 117}
50 118
51macro_rules! err { 119macro_rules! err {
@@ -57,7 +125,7 @@ macro_rules! err {
57 }; 125 };
58} 126}
59 127
60#[derive(Debug, Default)] 128#[derive(Clone, Debug, Default, PartialEq, Eq)]
61pub(super) struct Match { 129pub(super) struct Match {
62 pub(super) bindings: Bindings, 130 pub(super) bindings: Bindings,
63 /// We currently just keep the first error and count the rest to compare matches. 131 /// We currently just keep the first error and count the rest to compare matches.
@@ -65,6 +133,8 @@ pub(super) struct Match {
65 pub(super) err_count: usize, 133 pub(super) err_count: usize,
66 /// How many top-level token trees were left to match. 134 /// How many top-level token trees were left to match.
67 pub(super) unmatched_tts: usize, 135 pub(super) unmatched_tts: usize,
136 /// The number of bound variables
137 pub(super) bound_count: usize,
68} 138}
69 139
70impl Match { 140impl Match {
@@ -76,72 +146,373 @@ impl Match {
76} 146}
77 147
78/// Matching errors are added to the `Match`. 148/// Matching errors are added to the `Match`.
79pub(super) fn match_(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { 149pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
80 let mut res = Match::default(); 150 let mut res = match_loop(pattern, &input);
81 let mut src = TtIter::new(src); 151 res.bound_count = count(res.bindings.bindings());
152 return res;
153
154 fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
155 bindings
156 .map(|it| match it {
157 Binding::Fragment(_) => 1,
158 Binding::Empty => 1,
159 Binding::Nested(it) => count(it.iter()),
160 })
161 .sum()
162 }
163}
82 164
83 match_tokens(&mut res, pattern, &mut src); 165#[derive(Debug, Clone)]
166struct MatchState<'t> {
167 /// The position of the "dot" in this matcher
168 dot: OpDelimitedIter<'t>,
84 169
85 if src.len() > 0 { 170 /// Token subtree stack
86 res.unmatched_tts += src.len(); 171 /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
87 res.add_err(err!("leftover tokens")); 172 /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
88 } 173 /// that where the bottom of the stack is the outermost matcher.
174 stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
175
176 /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
177 /// before we enter the repetition.
178 up: Option<Box<MatchState<'t>>>,
179
180 /// The separator if we are in a repetition.
181 sep: Option<Separator>,
182
183 /// The KleeneOp of this sequence if we are in a repetition.
184 sep_kind: Option<RepeatKind>,
89 185
90 res 186 /// Number of separator tokens parsed so far
187 sep_parsed: Option<usize>,
188
189 /// Matched meta variable bindings
190 bindings: SmallVec<[Bindings; 4]>,
191
192 /// Cached result of meta variable parsing
193 meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
194
195 /// Whether an error occurred in this state; errors are propagated ("poisoned") up to the parent
196 is_error: bool,
91} 197}
92 198
93fn match_tokens(res: &mut Match, pattern: &MetaTemplate, src: &mut TtIter) { 199/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
94 for op in pattern.iter() { 200/// produce more items in `next_items`, `eof_items`, and `bb_items`.
95 match op { 201///
96 Op::Leaf(lhs) => { 202/// For more info about the how this happens, see the module-level doc comments and the inline
97 if let Err(err) = match_leaf(lhs, src) { 203/// comments of this function.
98 res.add_err(err); 204///
99 continue; 205/// # Parameters
206///
207/// - `src`: the current token of the parser.
208/// - `stack`: the "parent" frames of the token tree
209/// - `res`: the match result to store errors
210/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
211/// successful execution of this function.
212/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
213/// the function `parse`.
214/// - `eof_items`: the set of items that would be valid if this was the EOF.
215/// - `bb_items`: the set of items that are waiting for the black-box parser.
216/// - `error_items`: the set of items in errors, used for error-resilient parsing
217fn match_loop_inner<'t>(
218 src: TtIter<'t>,
219 stack: &[TtIter<'t>],
220 res: &mut Match,
221 cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
222 bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
223 next_items: &mut Vec<MatchState<'t>>,
224 eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
225 error_items: &mut SmallVec<[MatchState<'t>; 1]>,
226) {
227 macro_rules! try_push {
228 ($items: expr, $it:expr) => {
229 if $it.is_error {
230 error_items.push($it);
231 } else {
232 $items.push($it);
233 }
234 };
235 }
236
237 while let Some(mut item) = cur_items.pop() {
238 while item.dot.is_eof() {
239 match item.stack.pop() {
240 Some(frame) => {
241 item.dot = frame;
242 item.dot.next();
100 } 243 }
244 None => break,
101 } 245 }
102 Op::Subtree { tokens, delimiter: delim } => { 246 }
103 let rhs = match src.expect_subtree() { 247 let op = match item.dot.peek() {
104 Ok(s) => s, 248 None => {
105 Err(()) => { 249 // We are at or past the end of the matcher of `item`.
106 res.add_err(err!("expected subtree")); 250 if item.up.is_some() {
107 continue; 251 if item.sep_parsed.is_none() {
252 // Get the `up` matcher
253 let mut new_pos = *item.up.clone().unwrap();
254 // Add matches from this repetition to the `matches` of `up`
255 if let Some(bindings) = new_pos.bindings.last_mut() {
256 for (i, b) in item.bindings.iter_mut().enumerate() {
257 bindings.push_nested(i, b.clone()).unwrap();
258 }
259 }
260 // Move the "dot" past the repetition in `up`
261 new_pos.dot.next();
262 new_pos.is_error = new_pos.is_error || item.is_error;
263 cur_items.push(new_pos);
264 }
265
266 // Check if we need a separator.
267 // We check the separator one by one
268 let sep_idx = *item.sep_parsed.as_ref().unwrap_or(&0);
269 let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count);
270 if item.sep.is_some() && sep_idx != sep_len {
271 let sep = item.sep.as_ref().unwrap();
272 if src.clone().expect_separator(&sep, sep_idx) {
273 item.dot.next();
274 item.sep_parsed = Some(sep_idx + 1);
275 try_push!(next_items, item);
276 }
277 }
278 // We don't need a separator. Move the "dot" back to the beginning of the matcher
279 // and try to match again UNLESS we are only allowed to have _one_ repetition.
280 else if item.sep_kind != Some(RepeatKind::ZeroOrOne) {
281 item.dot = item.dot.reset();
282 item.sep_parsed = None;
283 item.bindings.push(Bindings::default());
284 cur_items.push(item);
285 }
286 } else {
287 // If we are not in a repetition, then being at the end of a matcher means that we have
288 // reached the potential end of the input.
289 try_push!(eof_items, item);
290 }
291 continue;
292 }
293 Some(it) => it,
294 };
295
296 // We are in the middle of a matcher.
297 match op {
298 OpDelimited::Op(Op::Repeat { tokens, kind, separator }) => {
299 if matches!(kind, RepeatKind::ZeroOrMore | RepeatKind::ZeroOrOne) {
300 let mut new_item = item.clone();
301 new_item.dot.next();
302 let mut vars = Vec::new();
303 let bindings = new_item.bindings.last_mut().unwrap();
304 collect_vars(&mut vars, tokens);
305 for var in vars {
306 bindings.push_empty(&var);
108 } 307 }
109 }; 308 cur_items.push(new_item);
110 if delim.map(|it| it.kind) != rhs.delimiter_kind() {
111 res.add_err(err!("mismatched delimiter"));
112 continue;
113 } 309 }
114 let mut src = TtIter::new(rhs); 310 cur_items.push(MatchState {
115 match_tokens(res, tokens, &mut src); 311 dot: tokens.iter_delimited(None),
116 if src.len() > 0 { 312 stack: Default::default(),
117 res.add_err(err!("leftover tokens")); 313 up: Some(Box::new(item)),
314 sep: separator.clone(),
315 sep_kind: Some(*kind),
316 sep_parsed: None,
317 bindings: smallvec![Bindings::default()],
318 meta_result: None,
319 is_error: false,
320 })
321 }
322 OpDelimited::Op(Op::Subtree { tokens, delimiter }) => {
323 if let Ok(subtree) = src.clone().expect_subtree() {
324 if subtree.delimiter_kind() == delimiter.map(|it| it.kind) {
325 item.stack.push(item.dot);
326 item.dot = tokens.iter_delimited(delimiter.as_ref());
327 cur_items.push(item);
328 }
118 } 329 }
119 } 330 }
120 Op::Var { name, kind, .. } => { 331 OpDelimited::Op(Op::Var { kind, name, .. }) => {
121 let kind = match kind { 332 if let Some(kind) = kind {
122 Some(k) => k, 333 let mut fork = src.clone();
123 None => { 334 let match_res = match_meta_var(kind.as_str(), &mut fork);
124 res.add_err(ExpandError::UnexpectedToken); 335 match match_res.err {
125 continue; 336 None => {
337 // Some meta variables are optional (e.g. vis)
338 if match_res.value.is_some() {
339 item.meta_result = Some((fork, match_res));
340 try_push!(bb_items, item);
341 } else {
342 item.bindings.last_mut().unwrap().push_optional(name);
343 item.dot.next();
344 cur_items.push(item);
345 }
346 }
347 Some(err) => {
348 res.add_err(err);
349 match match_res.value {
350 Some(fragment) => {
351 item.bindings
352 .last_mut()
353 .unwrap()
354 .inner
355 .push((name.clone(), Binding::Fragment(fragment)));
356 }
357 _ => {}
358 }
359 item.is_error = true;
360 error_items.push(item);
361 }
126 } 362 }
127 }; 363 }
128 let ExpandResult { value: matched, err: match_err } = 364 }
129 match_meta_var(kind.as_str(), src); 365 OpDelimited::Op(Op::Leaf(leaf)) => {
130 match matched { 366 if let Err(err) = match_leaf(&leaf, &mut src.clone()) {
367 res.add_err(err);
368 item.is_error = true;
369 } else {
370 item.dot.next();
371 }
372 try_push!(next_items, item);
373 }
374 OpDelimited::Open => {
375 if matches!(src.clone().next(), Some(tt::TokenTree::Subtree(..))) {
376 item.dot.next();
377 try_push!(next_items, item);
378 }
379 }
380 OpDelimited::Close => {
381 let is_delim_closed = src.peek_n(0).is_none() && !stack.is_empty();
382 if is_delim_closed {
383 item.dot.next();
384 try_push!(next_items, item);
385 }
386 }
387 }
388 }
389}
390
391fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
392 let mut src = TtIter::new(src);
393 let mut stack: SmallVec<[TtIter; 1]> = SmallVec::new();
394 let mut res = Match::default();
395    let mut error_recover_item = None;
396
397 let mut cur_items = smallvec![MatchState {
398 dot: pattern.iter_delimited(None),
399 stack: Default::default(),
400 up: None,
401 sep: None,
402 sep_kind: None,
403 sep_parsed: None,
404 bindings: smallvec![Bindings::default()],
405 is_error: false,
406 meta_result: None,
407 }];
408
409 let mut next_items = vec![];
410
411 loop {
412 let mut bb_items = SmallVec::new();
413 let mut eof_items = SmallVec::new();
414 let mut error_items = SmallVec::new();
415
416 stdx::always!(next_items.is_empty());
417
418 match_loop_inner(
419 src.clone(),
420 &stack,
421 &mut res,
422 &mut cur_items,
423 &mut bb_items,
424 &mut next_items,
425 &mut eof_items,
426 &mut error_items,
427 );
428 stdx::always!(cur_items.is_empty());
429
430 if error_items.len() > 0 {
431            error_recover_item = error_items.pop();
432 } else if eof_items.len() > 0 {
433            error_recover_item = Some(eof_items[0].clone());
434 }
435
436 // We need to do some post processing after the `match_loop_inner`.
437 // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
438 // either the parse is ambiguous (which should never happen) or there is a syntax error.
439 if src.peek_n(0).is_none() && stack.is_empty() {
440 if eof_items.len() == 1 {
441                // Remove all errors, because it is the correct answer!
442 res = Match::default();
443 res.bindings = eof_items[0].bindings[0].clone();
444 } else {
445 // Error recovery
446                if error_recover_item.is_some() {
447                    res.bindings = error_recover_item.unwrap().bindings[0].clone();
448 }
449 res.add_err(ExpandError::UnexpectedToken);
450 }
451 return res;
452 }
453
454 // If there are no possible next positions AND we aren't waiting for the black-box parser,
455 // then there is a syntax error.
456 //
457        // Another possibility is that we need to call out to parse some Rust nonterminal
458 // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
459 if (bb_items.is_empty() && next_items.is_empty())
460 || (!bb_items.is_empty() && !next_items.is_empty())
461 || bb_items.len() > 1
462 {
463 res.unmatched_tts += src.len();
464 while let Some(it) = stack.pop() {
465 src = it;
466 res.unmatched_tts += src.len();
467 }
468 res.add_err(err!("leftover tokens"));
469
470            if let Some(mut error_recover_item) = error_recover_item {
471                res.bindings = error_recover_item.bindings.remove(0);
472 }
473 return res;
474 }
475 // Dump all possible `next_items` into `cur_items` for the next iteration.
476 else if !next_items.is_empty() {
477 // Now process the next token
478 cur_items.extend(next_items.drain(..));
479
480 match src.next() {
481 Some(tt::TokenTree::Subtree(subtree)) => {
482 stack.push(src.clone());
483 src = TtIter::new(subtree);
484 }
485 None if !stack.is_empty() => src = stack.pop().unwrap(),
486 _ => (),
487 }
488 }
489 // Finally, we have the case where we need to call the black-box parser to get some
490 // nonterminal.
491 else {
492 stdx::always!(bb_items.len() == 1);
493 let mut item = bb_items.pop().unwrap();
494
495 if let Some(OpDelimited::Op(Op::Var { name, .. })) = item.dot.peek() {
496 let (iter, match_res) = item.meta_result.take().unwrap();
497 let bindings = item.bindings.last_mut().unwrap();
498 match match_res.value {
131 Some(fragment) => { 499 Some(fragment) => {
132 res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment)); 500 bindings.inner.push((name.clone(), Binding::Fragment(fragment)));
133 } 501 }
134 None if match_err.is_none() => res.bindings.push_optional(name), 502 None if match_res.err.is_none() => bindings.push_optional(name),
135 _ => {} 503 _ => {}
136 } 504 }
137 if let Some(err) = match_err { 505 if let Some(err) = match_res.err {
138 res.add_err(err); 506 res.add_err(err);
139 } 507 }
508 src = iter.clone();
509 item.dot.next();
510 } else {
511 unreachable!()
140 } 512 }
141 Op::Repeat { tokens: subtree, kind, separator } => { 513 cur_items.push(item);
142 match_repeat(res, subtree, *kind, separator, src);
143 }
144 } 514 }
515 stdx::always!(!cur_items.is_empty());
145 } 516 }
146} 517}
147 518
@@ -173,73 +544,6 @@ fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter) -> Result<(), ExpandError> {
173 Ok(()) 544 Ok(())
174} 545}
175 546
176fn match_repeat(
177 res: &mut Match,
178 pattern: &MetaTemplate,
179 kind: RepeatKind,
180 separator: &Option<Separator>,
181 src: &mut TtIter,
182) {
183 // Dirty hack to make macro-expansion terminate.
184 // This should be replaced by a proper macro-by-example implementation
185 let mut limit = 65536;
186 let mut counter = 0;
187
188 for i in 0.. {
189 let mut fork = src.clone();
190
191 if let Some(separator) = &separator {
192 if i != 0 && !fork.eat_separator(separator) {
193 break;
194 }
195 }
196
197 let mut nested = Match::default();
198 match_tokens(&mut nested, pattern, &mut fork);
199 if nested.err.is_none() {
200 limit -= 1;
201 if limit == 0 {
202 log::warn!(
203 "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
204 pattern,
205 src,
206 kind,
207 separator
208 );
209 break;
210 }
211 *src = fork;
212
213 if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
214 res.add_err(err);
215 }
216 counter += 1;
217 if counter == 1 {
218 if let RepeatKind::ZeroOrOne = kind {
219 break;
220 }
221 }
222 } else {
223 break;
224 }
225 }
226
227 match (kind, counter) {
228 (RepeatKind::OneOrMore, 0) => {
229 res.add_err(ExpandError::UnexpectedToken);
230 }
231 (_, 0) => {
232 // Collect all empty variables in subtrees
233 let mut vars = Vec::new();
234 collect_vars(&mut vars, pattern);
235 for var in vars {
236 res.bindings.push_empty(&var)
237 }
238 }
239 _ => (),
240 }
241}
242
243fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> { 547fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
244 let fragment = match kind { 548 let fragment = match kind {
245 "path" => Path, 549 "path" => Path,
@@ -303,14 +607,14 @@ fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) {
303} 607}
304 608
305impl<'a> TtIter<'a> { 609impl<'a> TtIter<'a> {
306 fn eat_separator(&mut self, separator: &Separator) -> bool { 610 fn expect_separator(&mut self, separator: &Separator, idx: usize) -> bool {
307 let mut fork = self.clone(); 611 let mut fork = self.clone();
308 let ok = match separator { 612 let ok = match separator {
309 Separator::Ident(lhs) => match fork.expect_ident() { 613 Separator::Ident(lhs) if idx == 0 => match fork.expect_ident() {
310 Ok(rhs) => rhs.text == lhs.text, 614 Ok(rhs) => rhs.text == lhs.text,
311 _ => false, 615 _ => false,
312 }, 616 },
313 Separator::Literal(lhs) => match fork.expect_literal() { 617 Separator::Literal(lhs) if idx == 0 => match fork.expect_literal() {
314 Ok(rhs) => match rhs { 618 Ok(rhs) => match rhs {
315 tt::Leaf::Literal(rhs) => rhs.text == lhs.text, 619 tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
316 tt::Leaf::Ident(rhs) => rhs.text == lhs.text, 620 tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
@@ -318,10 +622,11 @@ impl<'a> TtIter<'a> {
318 }, 622 },
319 _ => false, 623 _ => false,
320 }, 624 },
321 Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { 625 Separator::Puncts(lhss) if idx < lhss.len() => match fork.expect_punct() {
322 Ok(rhs) => rhs.char == lhs.char, 626 Ok(rhs) => rhs.char == lhss[idx].char,
323 _ => false, 627 _ => false,
324 }), 628 },
629 _ => false,
325 }; 630 };
326 if ok { 631 if ok {
327 *self = fork; 632 *self = fork;
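Note on the separator handling above: the old `eat_separator` consumed a whole (possibly multi-punct) separator in one step, whereas the new `expect_separator(separator, idx)` consumes one token tree per NFA step, tracking progress in `sep_parsed` against `Separator::tt_count()`. A minimal standalone sketch of that idea, with stand-in types rather than the crate's own:

    // Stand-in for mbe's Separator: match the idx-th piece of a separator
    // against a single token, so a two-punct separator like `&&` takes two steps.
    enum Sep {
        Ident(String),
        Puncts(Vec<char>),
    }

    impl Sep {
        fn tt_count(&self) -> usize {
            match self {
                Sep::Ident(_) => 1,
                Sep::Puncts(p) => p.len(),
            }
        }

        fn expect_at(&self, idx: usize, token: &str) -> bool {
            match self {
                Sep::Ident(s) if idx == 0 => token == s.as_str(),
                Sep::Puncts(p) if idx < p.len() => token.len() == 1 && token.starts_with(p[idx]),
                _ => false,
            }
        }
    }

    fn main() {
        let and_and = Sep::Puncts(vec!['&', '&']);
        assert_eq!(and_and.tt_count(), 2);
        assert!(and_and.expect_at(0, "&"));
        assert!(and_and.expect_at(1, "&"));
        assert!(!and_and.expect_at(2, "&")); // separator fully consumed

        let kw = Sep::Ident("then".to_string());
        assert!(kw.expect_at(0, "then"));
    }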
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index 78368a33e..ad9953a7d 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -13,13 +13,17 @@ use crate::{
13 13
14impl Bindings { 14impl Bindings {
15 fn contains(&self, name: &str) -> bool { 15 fn contains(&self, name: &str) -> bool {
16 self.inner.contains_key(name) 16 self.inner.iter().any(|(n, _)| n == name)
17 } 17 }
18 18
19 fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { 19 fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> {
20 let mut b = self.inner.get(name).ok_or_else(|| { 20 let mut b: &Binding = self
21 ExpandError::BindingError(format!("could not find binding `{}`", name)) 21 .inner
22 })?; 22 .iter()
23 .find_map(|(n, b)| if n == name { Some(b) } else { None })
24 .ok_or_else(|| {
25 ExpandError::BindingError(format!("could not find binding `{}`", name))
26 })?;
23 for nesting_state in nesting.iter_mut() { 27 for nesting_state in nesting.iter_mut() {
24 nesting_state.hit = true; 28 nesting_state.hit = true;
25 b = match b { 29 b = match b {
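The `Bindings` lookup above now scans a `Vec` of `(name, binding)` pairs instead of indexing a map. A stripped-down, runnable version of the same pattern, with simplified stand-in types rather than the crate's own:

    struct Binding(&'static str);

    struct Bindings {
        inner: Vec<(String, Binding)>,
    }

    impl Bindings {
        fn contains(&self, name: &str) -> bool {
            self.inner.iter().any(|(n, _)| n == name)
        }

        // Linear scan over the (name, binding) pairs.
        fn get(&self, name: &str) -> Option<&Binding> {
            self.inner.iter().find_map(|(n, b)| if n == name { Some(b) } else { None })
        }
    }

    fn main() {
        let bindings = Bindings { inner: vec![("i".to_string(), Binding("1"))] };
        assert!(bindings.contains("i"));
        assert_eq!(bindings.get("i").map(|b| b.0), Some("1"));
        assert!(bindings.get("missing").is_none());
    }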
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 4c298f85f..f3d2da55a 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -21,7 +21,7 @@ use test_utils::mark;
21pub use tt::{Delimiter, DelimiterKind, Punct}; 21pub use tt::{Delimiter, DelimiterKind, Punct};
22 22
23use crate::{ 23use crate::{
24 parser::{parse_pattern, parse_template, Op}, 24 parser::{parse_pattern, parse_template, MetaTemplate, Op},
25 tt_iter::TtIter, 25 tt_iter::TtIter,
26}; 26};
27 27
@@ -94,15 +94,6 @@ struct Rule {
94 rhs: MetaTemplate, 94 rhs: MetaTemplate,
95} 95}
96 96
97#[derive(Clone, Debug, PartialEq, Eq)]
98struct MetaTemplate(Vec<Op>);
99
100impl<'a> MetaTemplate {
101 fn iter(&self) -> impl Iterator<Item = &Op> {
102 self.0.iter()
103 }
104}
105
106#[derive(Clone, Copy, Debug, PartialEq, Eq)] 97#[derive(Clone, Copy, Debug, PartialEq, Eq)]
107struct Shift(u32); 98struct Shift(u32);
108 99
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index f891ec29c..8671322e1 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -5,7 +5,75 @@ use smallvec::SmallVec;
5use syntax::SmolStr; 5use syntax::SmolStr;
6use tt::Delimiter; 6use tt::Delimiter;
7 7
8use crate::{tt_iter::TtIter, MetaTemplate, ParseError}; 8use crate::{tt_iter::TtIter, ParseError};
9
10#[derive(Clone, Debug, PartialEq, Eq)]
11pub(crate) struct MetaTemplate(pub(crate) Vec<Op>);
12
13#[derive(Debug, Clone, Copy)]
14pub(crate) enum OpDelimited<'a> {
15 Op(&'a Op),
16 Open,
17 Close,
18}
19
20#[derive(Debug, Clone, Copy)]
21pub(crate) struct OpDelimitedIter<'a> {
22 inner: &'a Vec<Op>,
23 delimited: Option<&'a Delimiter>,
24 idx: usize,
25}
26
27impl<'a> OpDelimitedIter<'a> {
28 pub(crate) fn is_eof(&self) -> bool {
29 let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
30 self.idx >= len
31 }
32
33 pub(crate) fn peek(&self) -> Option<OpDelimited<'a>> {
34 match self.delimited {
35 None => self.inner.get(self.idx).map(OpDelimited::Op),
36 Some(_) => match self.idx {
37 0 => Some(OpDelimited::Open),
38 i if i == self.inner.len() + 1 => Some(OpDelimited::Close),
39 i => self.inner.get(i - 1).map(OpDelimited::Op),
40 },
41 }
42 }
43
44 pub(crate) fn reset(&self) -> Self {
45 Self { inner: &self.inner, idx: 0, delimited: self.delimited }
46 }
47}
48
49impl<'a> Iterator for OpDelimitedIter<'a> {
50 type Item = OpDelimited<'a>;
51
52 fn next(&mut self) -> Option<Self::Item> {
53 let res = self.peek();
54 self.idx += 1;
55 res
56 }
57
58 fn size_hint(&self) -> (usize, Option<usize>) {
59 let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
60 let remain = len.checked_sub(self.idx).unwrap_or(0);
61 (remain, Some(remain))
62 }
63}
64
65impl<'a> MetaTemplate {
66 pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
67 self.0.iter()
68 }
69
70 pub(crate) fn iter_delimited(
71 &'a self,
72 delimited: Option<&'a Delimiter>,
73 ) -> OpDelimitedIter<'a> {
74 OpDelimitedIter { inner: &self.0, idx: 0, delimited }
75 }
76}
9 77
10#[derive(Clone, Debug, PartialEq, Eq)] 78#[derive(Clone, Debug, PartialEq, Eq)]
11pub(crate) enum Op { 79pub(crate) enum Op {
@@ -47,6 +115,16 @@ impl PartialEq for Separator {
47 } 115 }
48} 116}
49 117
118impl Separator {
119 pub(crate) fn tt_count(&self) -> usize {
120 match self {
121 Separator::Literal(_) => 1,
122 Separator::Ident(_) => 1,
123 Separator::Puncts(it) => it.len(),
124 }
125 }
126}
127
50pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> { 128pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
51 parse_inner(&template, Mode::Template).into_iter().collect() 129 parse_inner(&template, Mode::Template).into_iter().collect()
52} 130}
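The new `iter_delimited` above walks a delimited pattern as an explicit `Open` step, the inner ops, then a `Close` step, so the matcher can advance over delimiters like any other token. A standalone sketch of that shape, with simplified types rather than the crate's own:

    #[derive(Debug, PartialEq)]
    enum OpDelimited<'a> {
        Op(&'a str),
        Open,
        Close,
    }

    // Yields Open/Close markers only when the pattern actually has a delimiter.
    fn iter_delimited<'a>(ops: &'a [&'a str], delimited: bool) -> Vec<OpDelimited<'a>> {
        let mut out = Vec::new();
        if delimited {
            out.push(OpDelimited::Open);
        }
        out.extend(ops.iter().copied().map(OpDelimited::Op));
        if delimited {
            out.push(OpDelimited::Close);
        }
        out
    }

    fn main() {
        let ops = ["$i:ident", "+"];
        assert_eq!(iter_delimited(&ops, false).len(), 2);
        assert_eq!(
            iter_delimited(&ops, true),
            vec![
                OpDelimited::Open,
                OpDelimited::Op("$i:ident"),
                OpDelimited::Op("+"),
                OpDelimited::Close,
            ]
        );
    }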
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index f1eadcd1e..5c641ebf2 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -457,6 +457,17 @@ fn test_match_group_with_multichar_sep() {
457} 457}
458 458
459#[test] 459#[test]
460fn test_match_group_with_multichar_sep2() {
461 parse_macro(
462 r#"
463 macro_rules! foo {
464 (fn $name:ident {$($i:literal)&&*} ) => ( fn $name() -> bool { $($i)&&*} );
465 }"#,
466 )
467 .assert_expand_items("foo! (fn baz {true && true} );", "fn baz () -> bool {true &&true}");
468}
469
470#[test]
460fn test_match_group_zero_match() { 471fn test_match_group_zero_match() {
461 parse_macro( 472 parse_macro(
462 r#" 473 r#"
@@ -1267,6 +1278,18 @@ macro_rules! m {
1267 .is_some()); 1278 .is_some());
1268} 1279}
1269 1280
1281#[test]
1282fn test_match_is_not_greedy() {
1283 parse_macro(
1284 r#"
1285macro_rules! foo {
1286 ($($i:ident $(,)*),*) => {};
1287}
1288"#,
1289 )
1290 .assert_expand_items(r#"foo!(a,b);"#, r#""#);
1291}
1292
1270// The following tests are based on real world situations 1293// The following tests are based on real world situations
1271#[test] 1294#[test]
1272fn test_vec() { 1295fn test_vec() {
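For reference, the first new test above exercises a two-punct repetition separator (`&&`); written out by hand, the fixture and the expansion it asserts compile to roughly the following (illustrative only, mirroring the test):

    // Same macro as the `test_match_group_with_multichar_sep2` fixture above.
    macro_rules! foo {
        (fn $name:ident {$($i:literal)&&*} ) => ( fn $name() -> bool { $($i)&&*} );
    }

    // Expands to approximately: fn baz() -> bool { true && true }
    foo!(fn baz {true && true});

    fn main() {
        assert!(baz());
    }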
diff --git a/crates/proc_macro_srv/Cargo.toml b/crates/proc_macro_srv/Cargo.toml
index 6c8c28980..4c1b3036a 100644
--- a/crates/proc_macro_srv/Cargo.toml
+++ b/crates/proc_macro_srv/Cargo.toml
@@ -20,7 +20,7 @@ proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" }
20test_utils = { path = "../test_utils", version = "0.0.0" } 20test_utils = { path = "../test_utils", version = "0.0.0" }
21 21
22[dev-dependencies] 22[dev-dependencies]
23cargo_metadata = "0.12.2" 23cargo_metadata = "0.13"
24 24
25# used as proc macro test targets 25# used as proc macro test targets
26serde_derive = "1.0.106" 26serde_derive = "1.0.106"
diff --git a/crates/proc_macro_srv/src/rustc_server.rs b/crates/proc_macro_srv/src/rustc_server.rs
index a8504f762..952b4a97f 100644
--- a/crates/proc_macro_srv/src/rustc_server.rs
+++ b/crates/proc_macro_srv/src/rustc_server.rs
@@ -14,7 +14,6 @@ use std::collections::HashMap;
14use std::hash::Hash; 14use std::hash::Hash;
15use std::iter::FromIterator; 15use std::iter::FromIterator;
16use std::ops::Bound; 16use std::ops::Bound;
17use std::str::FromStr;
18use std::{ascii, vec::IntoIter}; 17use std::{ascii, vec::IntoIter};
19 18
20type Group = tt::Subtree; 19type Group = tt::Subtree;
@@ -278,6 +277,42 @@ impl server::FreeFunctions for Rustc {
278 } 277 }
279} 278}
280 279
280fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
281 tt::Subtree {
282 delimiter: subtree.delimiter.map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
283 token_trees: subtree
284 .token_trees
285 .into_iter()
286 .map(|t| token_tree_replace_token_ids_with_unspecified(t))
287 .collect(),
288 }
289}
290
291fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
292 match tt {
293 tt::TokenTree::Leaf(leaf) => {
294 tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
295 }
296 tt::TokenTree::Subtree(subtree) => {
297 tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
298 }
299 }
300}
301
302fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
303 match leaf {
304 tt::Leaf::Literal(lit) => {
305 tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
306 }
307 tt::Leaf::Punct(punct) => {
308 tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
309 }
310 tt::Leaf::Ident(ident) => {
311 tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
312 }
313 }
314}
315
281impl server::TokenStream for Rustc { 316impl server::TokenStream for Rustc {
282 fn new(&mut self) -> Self::TokenStream { 317 fn new(&mut self) -> Self::TokenStream {
283 Self::TokenStream::new() 318 Self::TokenStream::new()
@@ -287,7 +322,8 @@ impl server::TokenStream for Rustc {
287 stream.is_empty() 322 stream.is_empty()
288 } 323 }
289 fn from_str(&mut self, src: &str) -> Self::TokenStream { 324 fn from_str(&mut self, src: &str) -> Self::TokenStream {
290 Self::TokenStream::from_str(src).expect("cannot parse string") 325 let (subtree, _) = mbe::parse_to_token_tree(src).expect("cannot parse string");
326 TokenStream::with_subtree(subtree_replace_token_ids_with_unspecified(subtree))
291 } 327 }
292 fn to_string(&mut self, stream: &Self::TokenStream) -> String { 328 fn to_string(&mut self, stream: &Self::TokenStream) -> String {
293 stream.to_string() 329 stream.to_string()
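The helpers above reset every token id to `tt::TokenId::unspecified()` because a token tree parsed from a plain string has no original source for spans to point back to. A self-contained sketch of the same scrubbing idea, using stand-in types rather than the `tt` crate's:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct TokenId(u32);

    impl TokenId {
        const UNSPECIFIED: TokenId = TokenId(u32::MAX);
    }

    #[derive(Debug)]
    enum TokenTree {
        Leaf { text: String, id: TokenId },
        Subtree { id: TokenId, children: Vec<TokenTree> },
    }

    // Recursively replace every id, mirroring the leaf/subtree split above.
    fn scrub_ids(tt: TokenTree) -> TokenTree {
        match tt {
            TokenTree::Leaf { text, .. } => TokenTree::Leaf { text, id: TokenId::UNSPECIFIED },
            TokenTree::Subtree { children, .. } => TokenTree::Subtree {
                id: TokenId::UNSPECIFIED,
                children: children.into_iter().map(scrub_ids).collect(),
            },
        }
    }

    fn main() {
        let parsed = TokenTree::Subtree {
            id: TokenId(7),
            children: vec![TokenTree::Leaf { text: "1".into(), id: TokenId(42) }],
        };
        match scrub_ids(parsed) {
            TokenTree::Subtree { id, .. } => assert_eq!(id, TokenId::UNSPECIFIED),
            TokenTree::Leaf { .. } => unreachable!(),
        }
    }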
diff --git a/crates/proc_macro_srv/src/tests/utils.rs b/crates/proc_macro_srv/src/tests/utils.rs
index 196abb8fc..22813052d 100644
--- a/crates/proc_macro_srv/src/tests/utils.rs
+++ b/crates/proc_macro_srv/src/tests/utils.rs
@@ -8,6 +8,7 @@ use test_utils::assert_eq_text;
8 8
9mod fixtures { 9mod fixtures {
10 use cargo_metadata::Message; 10 use cargo_metadata::Message;
11 use std::path::PathBuf;
11 use std::process::Command; 12 use std::process::Command;
12 13
13 // Use current project metadata to get the proc-macro dylib path 14 // Use current project metadata to get the proc-macro dylib path
@@ -24,7 +25,7 @@ mod fixtures {
24 if artifact.target.kind.contains(&"proc-macro".to_string()) { 25 if artifact.target.kind.contains(&"proc-macro".to_string()) {
25 let repr = format!("{} {}", crate_name, version); 26 let repr = format!("{} {}", crate_name, version);
26 if artifact.package_id.repr.starts_with(&repr) { 27 if artifact.package_id.repr.starts_with(&repr) {
27 return artifact.filenames[0].clone(); 28 return PathBuf::from(&artifact.filenames[0]);
28 } 29 }
29 } 30 }
30 } 31 }
diff --git a/crates/project_model/Cargo.toml b/crates/project_model/Cargo.toml
index 293cb5bfe..fe3258332 100644
--- a/crates/project_model/Cargo.toml
+++ b/crates/project_model/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
12[dependencies] 12[dependencies]
13log = "0.4.8" 13log = "0.4.8"
14rustc-hash = "1.1.0" 14rustc-hash = "1.1.0"
15cargo_metadata = "0.12.2" 15cargo_metadata = "0.13"
16serde = { version = "1.0.106", features = ["derive"] } 16serde = { version = "1.0.106", features = ["derive"] }
17serde_json = "1.0.48" 17serde_json = "1.0.48"
18anyhow = "1.0.26" 18anyhow = "1.0.26"
@@ -22,7 +22,7 @@ la-arena = { version = "0.2.0", path = "../../lib/arena" }
22cfg = { path = "../cfg", version = "0.0.0" } 22cfg = { path = "../cfg", version = "0.0.0" }
23base_db = { path = "../base_db", version = "0.0.0" } 23base_db = { path = "../base_db", version = "0.0.0" }
24toolchain = { path = "../toolchain", version = "0.0.0" } 24toolchain = { path = "../toolchain", version = "0.0.0" }
25proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" } 25proc_macro_api = { path = "../proc_macro_api", version = "0.0.0" }
26paths = { path = "../paths", version = "0.0.0" } 26paths = { path = "../paths", version = "0.0.0" }
27stdx = { path = "../stdx", version = "0.0.0" } 27stdx = { path = "../stdx", version = "0.0.0" }
28profile = { path = "../profile", version = "0.0.0" } 28profile = { path = "../profile", version = "0.0.0" }
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs
index 295b5f8ef..728a258ea 100644
--- a/crates/project_model/src/build_data.rs
+++ b/crates/project_model/src/build_data.rs
@@ -1,14 +1,14 @@
1//! Handles build script specific information 1//! Handles build script specific information
2 2
3use std::{ 3use std::{
4 ffi::OsStr,
5 io::BufReader, 4 io::BufReader,
6 path::{Path, PathBuf}, 5 path::PathBuf,
7 process::{Command, Stdio}, 6 process::{Command, Stdio},
8 sync::Arc, 7 sync::Arc,
9}; 8};
10 9
11use anyhow::Result; 10use anyhow::Result;
11use cargo_metadata::camino::Utf8Path;
12use cargo_metadata::{BuildScript, Message}; 12use cargo_metadata::{BuildScript, Message};
13use itertools::Itertools; 13use itertools::Itertools;
14use paths::{AbsPath, AbsPathBuf}; 14use paths::{AbsPath, AbsPathBuf};
@@ -162,8 +162,8 @@ fn collect_from_workspace(
162 let res = res.entry(package_id.repr.clone()).or_default(); 162 let res = res.entry(package_id.repr.clone()).or_default();
163 // cargo_metadata crate returns default (empty) path for 163 // cargo_metadata crate returns default (empty) path for
164 // older cargos, which is not absolute, so work around that. 164 // older cargos, which is not absolute, so work around that.
165 if out_dir != PathBuf::default() { 165 if !out_dir.as_str().is_empty() {
166 let out_dir = AbsPathBuf::assert(out_dir); 166 let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir.into_os_string()));
167 res.out_dir = Some(out_dir); 167 res.out_dir = Some(out_dir);
168 res.cfgs = cfgs; 168 res.cfgs = cfgs;
169 } 169 }
@@ -178,7 +178,7 @@ fn collect_from_workspace(
178 // Skip rmeta file 178 // Skip rmeta file
179 if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) 179 if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
180 { 180 {
181 let filename = AbsPathBuf::assert(filename.clone()); 181 let filename = AbsPathBuf::assert(PathBuf::from(&filename));
182 let res = res.entry(package_id.repr.clone()).or_default(); 182 let res = res.entry(package_id.repr.clone()).or_default();
183 res.proc_macro_dylib_path = Some(filename); 183 res.proc_macro_dylib_path = Some(filename);
184 } 184 }
@@ -187,9 +187,9 @@ fn collect_from_workspace(
187 Message::CompilerMessage(message) => { 187 Message::CompilerMessage(message) => {
188 progress(message.target.name.clone()); 188 progress(message.target.name.clone());
189 } 189 }
190 Message::Unknown => (),
191 Message::BuildFinished(_) => {} 190 Message::BuildFinished(_) => {}
192 Message::TextLine(_) => {} 191 Message::TextLine(_) => {}
192 _ => {}
193 } 193 }
194 } 194 }
195 } 195 }
@@ -209,8 +209,8 @@ fn collect_from_workspace(
209} 209}
210 210
211// FIXME: File a better way to know if it is a dylib 211// FIXME: File a better way to know if it is a dylib
212fn is_dylib(path: &Path) -> bool { 212fn is_dylib(path: &Utf8Path) -> bool {
213 match path.extension().and_then(OsStr::to_str).map(|it| it.to_string().to_lowercase()) { 213 match path.extension().map(|e| e.to_string().to_lowercase()) {
214 None => false, 214 None => false,
215 Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"), 215 Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
216 } 216 }
@@ -227,9 +227,7 @@ fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut BuildDat
227 227
228 let mut manifest_dir = package.manifest_path.clone(); 228 let mut manifest_dir = package.manifest_path.clone();
229 manifest_dir.pop(); 229 manifest_dir.pop();
230 if let Some(cargo_manifest_dir) = manifest_dir.to_str() { 230 env.push(("CARGO_MANIFEST_DIR".into(), manifest_dir.into_string()));
231 env.push(("CARGO_MANIFEST_DIR".into(), cargo_manifest_dir.into()));
232 }
233 231
234 // Not always right, but works for common cases. 232 // Not always right, but works for common cases.
235 env.push(("CARGO".into(), "cargo".into())); 233 env.push(("CARGO".into(), "cargo".into()));
@@ -251,7 +249,6 @@ fn inject_cargo_env(package: &cargo_metadata::Package, build_data: &mut BuildDat
251 env.push(("CARGO_PKG_REPOSITORY".into(), package.repository.clone().unwrap_or_default())); 249 env.push(("CARGO_PKG_REPOSITORY".into(), package.repository.clone().unwrap_or_default()));
252 env.push(("CARGO_PKG_LICENSE".into(), package.license.clone().unwrap_or_default())); 250 env.push(("CARGO_PKG_LICENSE".into(), package.license.clone().unwrap_or_default()));
253 251
254 let license_file = 252 let license_file = package.license_file.as_ref().map(|buf| buf.to_string()).unwrap_or_default();
255 package.license_file.as_ref().map(|buf| buf.display().to_string()).unwrap_or_default();
256 env.push(("CARGO_PKG_LICENSE_FILE".into(), license_file)); 253 env.push(("CARGO_PKG_LICENSE_FILE".into(), license_file));
257} 254}
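The `cargo_metadata` 0.13 bump handled above switches metadata paths to `camino::Utf8PathBuf`, so crossing back into std paths (and from there into `AbsPathBuf`) is now explicit. A minimal sketch of that conversion, assuming `camino` is available as a direct dependency; the path here is illustrative:

    use std::path::PathBuf;

    use camino::Utf8Path;

    // A Utf8Path is guaranteed to be valid UTF-8, so `as_str` never fails
    // and a plain &str is enough to rebuild a std PathBuf.
    fn to_std_path(p: &Utf8Path) -> PathBuf {
        PathBuf::from(p.as_str())
    }

    fn main() {
        let manifest = Utf8Path::new("/tmp/demo/Cargo.toml");
        let std_path = to_std_path(manifest);
        assert_eq!(std_path.extension().and_then(|e| e.to_str()), Some("toml"));
    }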
diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs
index 1d8d34a0b..f7241b711 100644
--- a/crates/project_model/src/cargo_workspace.rs
+++ b/crates/project_model/src/cargo_workspace.rs
@@ -1,5 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use std::path::PathBuf;
3use std::{convert::TryInto, ops, process::Command, sync::Arc}; 4use std::{convert::TryInto, ops, process::Command, sync::Arc};
4 5
5use anyhow::{Context, Result}; 6use anyhow::{Context, Result};
@@ -249,11 +250,12 @@ impl CargoWorkspace {
249 let edition = edition 250 let edition = edition
250 .parse::<Edition>() 251 .parse::<Edition>()
251 .with_context(|| format!("Failed to parse edition {}", edition))?; 252 .with_context(|| format!("Failed to parse edition {}", edition))?;
253
252 let pkg = packages.alloc(PackageData { 254 let pkg = packages.alloc(PackageData {
253 id: id.repr.clone(), 255 id: id.repr.clone(),
254 name: name.clone(), 256 name: name.clone(),
255 version: version.to_string(), 257 version: version.to_string(),
256 manifest: AbsPathBuf::assert(manifest_path.clone()), 258 manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)),
257 targets: Vec::new(), 259 targets: Vec::new(),
258 is_member, 260 is_member,
259 edition, 261 edition,
@@ -268,7 +270,7 @@ impl CargoWorkspace {
268 let tgt = targets.alloc(TargetData { 270 let tgt = targets.alloc(TargetData {
269 package: pkg, 271 package: pkg,
270 name: meta_tgt.name.clone(), 272 name: meta_tgt.name.clone(),
271 root: AbsPathBuf::assert(meta_tgt.src_path.clone()), 273 root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)),
272 kind: TargetKind::new(meta_tgt.kind.as_slice()), 274 kind: TargetKind::new(meta_tgt.kind.as_slice()),
273 is_proc_macro, 275 is_proc_macro,
274 }); 276 });
@@ -305,7 +307,8 @@ impl CargoWorkspace {
305 packages[source].active_features.extend(node.features); 307 packages[source].active_features.extend(node.features);
306 } 308 }
307 309
308 let workspace_root = AbsPathBuf::assert(meta.workspace_root); 310 let workspace_root =
311 AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
309 let build_data_config = BuildDataConfig::new( 312 let build_data_config = BuildDataConfig::new(
310 cargo_toml.to_path_buf(), 313 cargo_toml.to_path_buf(),
311 config.clone(), 314 config.clone(),
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index b881cc229..8789f0852 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -24,7 +24,7 @@ jod-thread = "0.1.0"
24log = "0.4.8" 24log = "0.4.8"
25lsp-types = { version = "0.88.0", features = ["proposed"] } 25lsp-types = { version = "0.88.0", features = ["proposed"] }
26parking_lot = "0.11.0" 26parking_lot = "0.11.0"
27pico-args = "0.4.0" 27xflags = "0.1.2"
28oorandom = "11.1.2" 28oorandom = "11.1.2"
29rustc-hash = "1.1.0" 29rustc-hash = "1.1.0"
30serde = { version = "1.0.106", features = ["derive"] } 30serde = { version = "1.0.106", features = ["derive"] }
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs
deleted file mode 100644
index 164d94a30..000000000
--- a/crates/rust-analyzer/src/bin/args.rs
+++ /dev/null
@@ -1,274 +0,0 @@
1//! Command like parsing for rust-analyzer.
2//!
3//! If run started args, we run the LSP server loop. With a subcommand, we do a
4//! one-time batch processing.
5
6use std::{env, path::PathBuf};
7
8use anyhow::{bail, format_err, Result};
9use ide_ssr::{SsrPattern, SsrRule};
10use pico_args::Arguments;
11use rust_analyzer::cli::{AnalysisStatsCmd, BenchCmd, BenchWhat, Position, Verbosity};
12use vfs::AbsPathBuf;
13
14pub(crate) struct Args {
15 pub(crate) verbosity: Verbosity,
16 pub(crate) log_file: Option<PathBuf>,
17 pub(crate) no_buffering: bool,
18 pub(crate) command: Command,
19 #[allow(unused)]
20 pub(crate) wait_dbg: bool,
21}
22
23pub(crate) enum Command {
24 Parse { no_dump: bool },
25 Symbols,
26 Highlight { rainbow: bool },
27 AnalysisStats(AnalysisStatsCmd),
28 Bench(BenchCmd),
29 Diagnostics { path: PathBuf, load_output_dirs: bool, with_proc_macro: bool },
30 Ssr { rules: Vec<SsrRule> },
31 StructuredSearch { debug_snippet: Option<String>, patterns: Vec<SsrPattern> },
32 ProcMacro,
33 RunServer,
34 PrintConfigSchema,
35 Version,
36 Help,
37}
38
39const HELP: &str = "\
40rust-analyzer
41
42USAGE:
43 rust-analyzer [FLAGS] [COMMAND] [COMMAND_OPTIONS]
44
45FLAGS:
46 --version Print version
47 -h, --help Print this help
48
49 -v, --verbose
50 -vv, --spammy
51 -q, --quiet Set verbosity
52
53 --print-config-schema
54 Dump a LSP config JSON schema
55 --log-file <PATH> Log to the specified file instead of stderr
56 --no-log-buffering
57 Flush log records to the file immediately
58
59 --wait-dbg Wait until a debugger is attached to.
60 The flag is valid for debug builds only
61
62ENVIRONMENTAL VARIABLES:
63 RA_LOG Set log filter in env_logger format
64 RA_PROFILE Enable hierarchical profiler
65 RA_WAIT_DBG If set acts like a --wait-dbg flag
66
67COMMANDS:
68
69not specified Launch LSP server
70
71parse < main.rs Parse tree
72 --no-dump Suppress printing
73
74symbols < main.rs Parse input an print the list of symbols
75
76highlight < main.rs Highlight input as html
77 --rainbow Enable rainbow highlighting of identifiers
78
79analysis-stats <PATH> Batch typecheck project and print summary statistics
80 <PATH> Directory with Cargo.toml
81 --randomize Randomize order in which crates, modules, and items are processed
82 --parallel Run type inference in parallel
83 --memory-usage Collect memory usage statistics
84 -o, --only <PATH> Only analyze items matching this path
85 --with-deps Also analyze all dependencies
86 --load-output-dirs
87 Load OUT_DIR values by running `cargo check` before analysis
88 --with-proc-macro Use proc-macro-srv for proc-macro expanding
89
90analysis-bench <PATH> Benchmark specific analysis operation
91 <PATH> Directory with Cargo.toml
92 --highlight <PATH>
93 Compute syntax highlighting for this file
94 --complete <PATH:LINE:COLUMN>
95 Compute completions at this location
96 --goto-def <PATH:LINE:COLUMN>
97 Compute goto definition at this location
98 --memory-usage Collect memory usage statistics
99 --load-output-dirs
100 Load OUT_DIR values by running `cargo check` before analysis
101 --with-proc-macro Use proc-macro-srv for proc-macro expanding
102
103diagnostics <PATH>
104 <PATH> Directory with Cargo.toml
105 --load-output-dirs
106 Load OUT_DIR values by running `cargo check` before analysis
107 --with-proc-macro Use proc-macro-srv for proc-macro expanding
108
109ssr [RULE...]
110 <RULE> A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
111
112search [PATTERN..]
113 <PATTERN> A structured search replace pattern (`$a.foo($b)`)
114 --debug <snippet> Prints debug information for any nodes with source exactly
115 equal to <snippet>
116";
117
118impl Args {
119 pub(crate) fn parse() -> Result<Args> {
120 let mut matches = Arguments::from_env();
121
122 if matches.contains("--version") {
123 finish_args(matches)?;
124 return Ok(Args {
125 verbosity: Verbosity::Normal,
126 log_file: None,
127 command: Command::Version,
128 no_buffering: false,
129 wait_dbg: false,
130 });
131 }
132
133 let verbosity = match (
134 matches.contains(["-vv", "--spammy"]),
135 matches.contains(["-v", "--verbose"]),
136 matches.contains(["-q", "--quiet"]),
137 ) {
138 (true, _, true) => bail!("Invalid flags: -q conflicts with -vv"),
139 (true, _, false) => Verbosity::Spammy,
140 (false, false, false) => Verbosity::Normal,
141 (false, false, true) => Verbosity::Quiet,
142 (false, true, false) => Verbosity::Verbose,
143 (false, true, true) => bail!("Invalid flags: -q conflicts with -v"),
144 };
145 let log_file = matches.opt_value_from_str("--log-file")?;
146 let no_buffering = matches.contains("--no-log-buffering");
147 let wait_dbg = matches.contains("--wait-dbg");
148
149 if matches.contains(["-h", "--help"]) {
150 eprintln!("{}", HELP);
151 return Ok(Args {
152 verbosity,
153 log_file: None,
154 command: Command::Help,
155 no_buffering,
156 wait_dbg,
157 });
158 }
159
160 if matches.contains("--print-config-schema") {
161 return Ok(Args {
162 verbosity,
163 log_file,
164 command: Command::PrintConfigSchema,
165 no_buffering,
166 wait_dbg,
167 });
168 }
169
170 let subcommand = match matches.subcommand()? {
171 Some(it) => it,
172 None => {
173 finish_args(matches)?;
174 return Ok(Args {
175 verbosity,
176 log_file,
177 command: Command::RunServer,
178 no_buffering,
179 wait_dbg,
180 });
181 }
182 };
183 let command = match subcommand.as_str() {
184 "parse" => Command::Parse { no_dump: matches.contains("--no-dump") },
185 "symbols" => Command::Symbols,
186 "highlight" => Command::Highlight { rainbow: matches.contains("--rainbow") },
187 "analysis-stats" => Command::AnalysisStats(AnalysisStatsCmd {
188 randomize: matches.contains("--randomize"),
189 parallel: matches.contains("--parallel"),
190 memory_usage: matches.contains("--memory-usage"),
191 only: matches.opt_value_from_str(["-o", "--only"])?,
192 with_deps: matches.contains("--with-deps"),
193 load_output_dirs: matches.contains("--load-output-dirs"),
194 with_proc_macro: matches.contains("--with-proc-macro"),
195 path: matches
196 .opt_free_from_str()?
197 .ok_or_else(|| format_err!("expected positional argument"))?,
198 }),
199 "analysis-bench" => Command::Bench(BenchCmd {
200 what: {
201 let highlight_path: Option<String> =
202 matches.opt_value_from_str("--highlight")?;
203 let complete_path: Option<Position> =
204 matches.opt_value_from_str("--complete")?;
205 let goto_def_path: Option<Position> =
206 matches.opt_value_from_str("--goto-def")?;
207 match (highlight_path, complete_path, goto_def_path) {
208 (Some(path), None, None) => {
209 let path = env::current_dir().unwrap().join(path);
210 BenchWhat::Highlight { path: AbsPathBuf::assert(path) }
211 }
212 (None, Some(position), None) => BenchWhat::Complete(position),
213 (None, None, Some(position)) => BenchWhat::GotoDef(position),
214 _ => panic!(
215 "exactly one of `--highlight`, `--complete` or `--goto-def` must be set"
216 ),
217 }
218 },
219 memory_usage: matches.contains("--memory-usage"),
220 load_output_dirs: matches.contains("--load-output-dirs"),
221 with_proc_macro: matches.contains("--with-proc-macro"),
222 path: matches
223 .opt_free_from_str()?
224 .ok_or_else(|| format_err!("expected positional argument"))?,
225 }),
226 "diagnostics" => Command::Diagnostics {
227 load_output_dirs: matches.contains("--load-output-dirs"),
228 with_proc_macro: matches.contains("--with-proc-macro"),
229 path: matches
230 .opt_free_from_str()?
231 .ok_or_else(|| format_err!("expected positional argument"))?,
232 },
233 "proc-macro" => Command::ProcMacro,
234 "ssr" => Command::Ssr {
235 rules: {
236 let mut acc = Vec::new();
237 while let Some(rule) = matches.opt_free_from_str()? {
238 acc.push(rule);
239 }
240 acc
241 },
242 },
243 "search" => Command::StructuredSearch {
244 debug_snippet: matches.opt_value_from_str("--debug")?,
245 patterns: {
246 let mut acc = Vec::new();
247 while let Some(rule) = matches.opt_free_from_str()? {
248 acc.push(rule);
249 }
250 acc
251 },
252 },
253 _ => {
254 eprintln!("{}", HELP);
255 return Ok(Args {
256 verbosity,
257 log_file: None,
258 command: Command::Help,
259 no_buffering,
260 wait_dbg,
261 });
262 }
263 };
264 finish_args(matches)?;
265 Ok(Args { verbosity, log_file, command, no_buffering, wait_dbg })
266 }
267}
268
269fn finish_args(args: Arguments) -> Result<()> {
270 if !args.finish().is_empty() {
271 bail!("Unused arguments.");
272 }
273 Ok(())
274}
diff --git a/crates/rust-analyzer/src/bin/flags.rs b/crates/rust-analyzer/src/bin/flags.rs
new file mode 100644
index 000000000..244912d26
--- /dev/null
+++ b/crates/rust-analyzer/src/bin/flags.rs
@@ -0,0 +1,251 @@
1//! Grammar for the command-line arguments.
2#![allow(unreachable_pub)]
3use std::{env, path::PathBuf};
4
5use ide_ssr::{SsrPattern, SsrRule};
6use rust_analyzer::cli::{BenchWhat, Position, Verbosity};
7use vfs::AbsPathBuf;
8
9xflags::args_parser! {
10 /// LSP server for the Rust programming language.
11 cmd rust-analyzer {
12 /// Verbosity level, can be repeated multiple times.
13 repeated -v, --verbose
14 /// Verbosity level.
15 optional -q, --quiet
16
17 /// Log to the specified file instead of stderr.
18 optional --log-file path: PathBuf
19 /// Flush log records to the file immediately.
20 optional --no-log-buffering
21
22        /// Wait until a debugger is attached (requires debug build).
23 optional --wait-dbg
24
25 default cmd lsp-server {
26 /// Print version.
27 optional --version
28 /// Print help.
29 optional -h, --help
30
31 /// Dump a LSP config JSON schema.
32 optional --print-config-schema
33 }
34
35 /// Parse stdin.
36 cmd parse {
37 /// Suppress printing.
38 optional --no-dump
39 }
40
41 /// Parse stdin and print the list of symbols.
42 cmd symbols {}
43
44 /// Highlight stdin as html.
45 cmd highlight {
46 /// Enable rainbow highlighting of identifiers.
47 optional --rainbow
48 }
49
50 /// Batch typecheck project and print summary statistics
51 cmd analysis-stats
52 /// Directory with Cargo.toml.
53 required path: PathBuf
54 {
55 /// Randomize order in which crates, modules, and items are processed.
56 optional --randomize
57 /// Run type inference in parallel.
58 optional --parallel
59 /// Collect memory usage statistics.
60 optional --memory-usage
61
62 /// Only analyze items matching this path.
63 optional -o, --only path: String
64 /// Also analyze all dependencies.
65 optional --with-deps
66
67 /// Load OUT_DIR values by running `cargo check` before analysis.
68 optional --load-output-dirs
69 /// Use proc-macro-srv for proc-macro expanding.
70 optional --with-proc-macro
71 }
72
73 /// Benchmark specific analysis operation
74 cmd analysis-bench
75 /// Directory with Cargo.toml.
76 required path: PathBuf
77 {
78 /// Collect memory usage statistics.
79 optional --memory-usage
80
81 /// Compute syntax highlighting for this file
82 optional --highlight path: PathBuf
83 /// Compute completions at file:line:column location.
84 optional --complete location: Position
85 /// Compute goto definition at file:line:column location.
86 optional --goto-def location: Position
87
88 /// Load OUT_DIR values by running `cargo check` before analysis.
89 optional --load-output-dirs
90 /// Use proc-macro-srv for proc-macro expanding.
91 optional --with-proc-macro
92 }
93
94 cmd diagnostics
95 /// Directory with Cargo.toml.
96 required path: PathBuf
97 {
98 /// Load OUT_DIR values by running `cargo check` before analysis.
99 optional --load-output-dirs
100 /// Use proc-macro-srv for proc-macro expanding.
101 optional --with-proc-macro
102 }
103
104 cmd ssr
105 /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
106 repeated rule: SsrRule
107 {}
108
109 cmd search
110 /// A structured search replace pattern (`$a.foo($b)`)
111 repeated pattern: SsrPattern
112 {
113 /// Prints debug information for any nodes with source exactly equal to snippet.
114 optional --debug snippet: String
115 }
116
117 cmd proc-macro {}
118 }
119}
120
121// generated start
122// The following code is generated by `xflags` macro.
123// Run `env XFLAGS_DUMP= cargo build` to regenerate.
124#[derive(Debug)]
125pub struct RustAnalyzer {
126 pub verbose: u32,
127 pub quiet: bool,
128 pub log_file: Option<PathBuf>,
129 pub no_log_buffering: bool,
130 pub wait_dbg: bool,
131 pub subcommand: RustAnalyzerCmd,
132}
133
134#[derive(Debug)]
135pub enum RustAnalyzerCmd {
136 LspServer(LspServer),
137 Parse(Parse),
138 Symbols(Symbols),
139 Highlight(Highlight),
140 AnalysisStats(AnalysisStats),
141 AnalysisBench(AnalysisBench),
142 Diagnostics(Diagnostics),
143 Ssr(Ssr),
144 Search(Search),
145 ProcMacro(ProcMacro),
146}
147
148#[derive(Debug)]
149pub struct LspServer {
150 pub version: bool,
151 pub help: bool,
152 pub print_config_schema: bool,
153}
154
155#[derive(Debug)]
156pub struct Parse {
157 pub no_dump: bool,
158}
159
160#[derive(Debug)]
161pub struct Symbols {}
162
163#[derive(Debug)]
164pub struct Highlight {
165 pub rainbow: bool,
166}
167
168#[derive(Debug)]
169pub struct AnalysisStats {
170 pub path: PathBuf,
171
172 pub randomize: bool,
173 pub parallel: bool,
174 pub memory_usage: bool,
175 pub only: Option<String>,
176 pub with_deps: bool,
177 pub load_output_dirs: bool,
178 pub with_proc_macro: bool,
179}
180
181#[derive(Debug)]
182pub struct AnalysisBench {
183 pub path: PathBuf,
184
185 pub memory_usage: bool,
186 pub highlight: Option<PathBuf>,
187 pub complete: Option<Position>,
188 pub goto_def: Option<Position>,
189 pub load_output_dirs: bool,
190 pub with_proc_macro: bool,
191}
192
193#[derive(Debug)]
194pub struct Diagnostics {
195 pub path: PathBuf,
196
197 pub load_output_dirs: bool,
198 pub with_proc_macro: bool,
199}
200
201#[derive(Debug)]
202pub struct Ssr {
203 pub rule: Vec<SsrRule>,
204}
205
206#[derive(Debug)]
207pub struct Search {
208 pub pattern: Vec<SsrPattern>,
209
210 pub debug: Option<String>,
211}
212
213#[derive(Debug)]
214pub struct ProcMacro {}
215
216impl RustAnalyzer {
217 pub const HELP: &'static str = Self::_HELP;
218
219 pub fn from_env() -> xflags::Result<Self> {
220 let mut p = xflags::rt::Parser::new_from_env();
221 Self::_parse(&mut p)
222 }
223}
224// generated end
225
226impl RustAnalyzer {
227 pub(crate) fn verbosity(&self) -> Verbosity {
228 if self.quiet {
229 return Verbosity::Quiet;
230 }
231 match self.verbose {
232 0 => Verbosity::Normal,
233 1 => Verbosity::Verbose,
234 _ => Verbosity::Spammy,
235 }
236 }
237}
238
239impl AnalysisBench {
240 pub(crate) fn what(&self) -> BenchWhat {
241 match (&self.highlight, &self.complete, &self.goto_def) {
242 (Some(path), None, None) => {
243 let path = env::current_dir().unwrap().join(path);
244 BenchWhat::Highlight { path: AbsPathBuf::assert(path) }
245 }
246 (None, Some(position), None) => BenchWhat::Complete(position.clone()),
247 (None, None, Some(position)) => BenchWhat::GotoDef(position.clone()),
248 _ => panic!("exactly one of `--highlight`, `--complete` or `--goto-def` must be set"),
249 }
250 }
251}
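The `verbosity()` helper above folds the repeated `-v` count and the `-q` flag into a single level. A standalone sketch of that mapping, with a stand-in enum for `rust_analyzer::cli::Verbosity`:

    #[derive(Debug, PartialEq)]
    enum Verbosity {
        Quiet,
        Normal,
        Verbose,
        Spammy,
    }

    // `-q` wins outright; otherwise the number of `-v` occurrences decides.
    fn verbosity(quiet: bool, verbose: u32) -> Verbosity {
        if quiet {
            return Verbosity::Quiet;
        }
        match verbose {
            0 => Verbosity::Normal,
            1 => Verbosity::Verbose,
            _ => Verbosity::Spammy,
        }
    }

    fn main() {
        assert_eq!(verbosity(false, 0), Verbosity::Normal);
        assert_eq!(verbosity(false, 2), Verbosity::Spammy); // e.g. `-v -v`
        assert_eq!(verbosity(true, 3), Verbosity::Quiet);   // `-q` overrides
    }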
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 89482b952..288847980 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -1,14 +1,20 @@
1//! Driver for rust-analyzer. 1//! Driver for rust-analyzer.
2//! 2//!
3//! Based on cli flags, either spawns an LSP server, or runs a batch analysis 3//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
4mod args; 4mod flags;
5mod logger; 5mod logger;
6 6
7use std::{convert::TryFrom, env, fs, path::PathBuf, process}; 7use std::{convert::TryFrom, env, fs, path::Path, process};
8 8
9use lsp_server::Connection; 9use lsp_server::Connection;
10use project_model::ProjectManifest; 10use project_model::ProjectManifest;
11use rust_analyzer::{cli, config::Config, from_json, lsp_ext::supports_utf8, Result}; 11use rust_analyzer::{
12 cli::{self, AnalysisStatsCmd, BenchCmd},
13 config::Config,
14 from_json,
15 lsp_ext::supports_utf8,
16 Result,
17};
12use vfs::AbsPathBuf; 18use vfs::AbsPathBuf;
13 19
14#[cfg(all(feature = "mimalloc"))] 20#[cfg(all(feature = "mimalloc"))]
@@ -28,10 +34,10 @@ fn main() {
28} 34}
29 35
30fn try_main() -> Result<()> { 36fn try_main() -> Result<()> {
31 let args = args::Args::parse()?; 37 let flags = flags::RustAnalyzer::from_env()?;
32 38
33 #[cfg(debug_assertions)] 39 #[cfg(debug_assertions)]
34 if args.wait_dbg || env::var("RA_WAIT_DBG").is_ok() { 40 if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
35 #[allow(unused_mut)] 41 #[allow(unused_mut)]
36 let mut d = 4; 42 let mut d = 4;
37 while d == 4 { 43 while d == 4 {
@@ -39,35 +45,62 @@ fn try_main() -> Result<()> {
39 } 45 }
40 } 46 }
41 47
42 setup_logging(args.log_file, args.no_buffering)?; 48 setup_logging(flags.log_file.as_deref(), flags.no_log_buffering)?;
43 match args.command { 49 let verbosity = flags.verbosity();
44 args::Command::RunServer => run_server()?, 50
45 args::Command::PrintConfigSchema => { 51 match flags.subcommand {
46 println!("{:#}", Config::json_schema()); 52 flags::RustAnalyzerCmd::LspServer(cmd) => {
53 if cmd.print_config_schema {
54 println!("{:#}", Config::json_schema());
55 return Ok(());
56 }
57 if cmd.version {
58 println!("rust-analyzer {}", env!("REV"));
59 return Ok(());
60 }
61 if cmd.help {
62 println!("{}", flags::RustAnalyzer::HELP);
63 return Ok(());
64 }
65 run_server()?
47 } 66 }
48 args::Command::ProcMacro => proc_macro_srv::cli::run()?, 67 flags::RustAnalyzerCmd::ProcMacro(_) => proc_macro_srv::cli::run()?,
49 68 flags::RustAnalyzerCmd::Parse(cmd) => cli::parse(cmd.no_dump)?,
50 args::Command::Parse { no_dump } => cli::parse(no_dump)?, 69 flags::RustAnalyzerCmd::Symbols(_) => cli::symbols()?,
51 args::Command::Symbols => cli::symbols()?, 70 flags::RustAnalyzerCmd::Highlight(cmd) => cli::highlight(cmd.rainbow)?,
52 args::Command::Highlight { rainbow } => cli::highlight(rainbow)?, 71 flags::RustAnalyzerCmd::AnalysisStats(cmd) => AnalysisStatsCmd {
53 args::Command::AnalysisStats(cmd) => cmd.run(args.verbosity)?, 72 randomize: cmd.randomize,
54 args::Command::Bench(cmd) => cmd.run(args.verbosity)?, 73 parallel: cmd.parallel,
55 args::Command::Diagnostics { path, load_output_dirs, with_proc_macro } => { 74 memory_usage: cmd.memory_usage,
56 cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro)? 75 only: cmd.only,
76 with_deps: cmd.with_deps,
77 path: cmd.path,
78 load_output_dirs: cmd.load_output_dirs,
79 with_proc_macro: cmd.with_proc_macro,
57 } 80 }
58 args::Command::Ssr { rules } => { 81 .run(verbosity)?,
59 cli::apply_ssr_rules(rules)?; 82 flags::RustAnalyzerCmd::AnalysisBench(cmd) => {
83 let what = cmd.what();
84 BenchCmd {
85 memory_usage: cmd.memory_usage,
86 path: cmd.path,
87 load_output_dirs: cmd.load_output_dirs,
88 with_proc_macro: cmd.with_proc_macro,
89 what,
90 }
91 .run(verbosity)?
60 } 92 }
61 args::Command::StructuredSearch { patterns, debug_snippet } => { 93
62 cli::search_for_patterns(patterns, debug_snippet)?; 94 flags::RustAnalyzerCmd::Diagnostics(cmd) => {
95 cli::diagnostics(&cmd.path, cmd.load_output_dirs, cmd.with_proc_macro)?
63 } 96 }
64 args::Command::Version => println!("rust-analyzer {}", env!("REV")), 97 flags::RustAnalyzerCmd::Ssr(cmd) => cli::apply_ssr_rules(cmd.rule)?,
65 args::Command::Help => {} 98 flags::RustAnalyzerCmd::Search(cmd) => cli::search_for_patterns(cmd.pattern, cmd.debug)?,
66 } 99 }
67 Ok(()) 100 Ok(())
68} 101}
69 102
70fn setup_logging(log_file: Option<PathBuf>, no_buffering: bool) -> Result<()> { 103fn setup_logging(log_file: Option<&Path>, no_buffering: bool) -> Result<()> {
71 env::set_var("RUST_BACKTRACE", "short"); 104 env::set_var("RUST_BACKTRACE", "short");
72 105
73 let log_file = match log_file { 106 let log_file = match log_file {
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs
index 8991f3bdb..3bd7e678d 100644
--- a/crates/rust-analyzer/src/cli/analysis_bench.rs
+++ b/crates/rust-analyzer/src/cli/analysis_bench.rs
@@ -35,6 +35,7 @@ pub enum BenchWhat {
35 GotoDef(Position), 35 GotoDef(Position),
36} 36}
37 37
38#[derive(Debug, Clone)]
38pub struct Position { 39pub struct Position {
39 pub path: AbsPathBuf, 40 pub path: AbsPathBuf,
40 pub line: u32, 41 pub line: u32,
@@ -68,7 +69,7 @@ impl BenchCmd {
68 load_out_dirs_from_check: self.load_output_dirs, 69 load_out_dirs_from_check: self.load_output_dirs,
69 with_proc_macro: self.with_proc_macro, 70 with_proc_macro: self.with_proc_macro,
70 }; 71 };
71 let (mut host, vfs) = 72 let (mut host, vfs, _proc_macro) =
72 load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?; 73 load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
73 eprintln!("{:?}\n", start.elapsed()); 74 eprintln!("{:?}\n", start.elapsed());
74 75
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 9072d8944..ad0759bda 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -64,7 +64,7 @@ impl AnalysisStatsCmd {
64 load_out_dirs_from_check: self.load_output_dirs, 64 load_out_dirs_from_check: self.load_output_dirs,
65 with_proc_macro: self.with_proc_macro, 65 with_proc_macro: self.with_proc_macro,
66 }; 66 };
67 let (host, vfs) = 67 let (host, vfs, _proc_macro) =
68 load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?; 68 load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
69 let db = host.raw_database(); 69 let db = host.raw_database();
70 eprintln!("{:<20} {}", "Database loaded:", db_load_sw.elapsed()); 70 eprintln!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 876f6c44f..8b985716b 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -35,7 +35,8 @@ pub fn diagnostics(
35) -> Result<()> { 35) -> Result<()> {
36 let cargo_config = Default::default(); 36 let cargo_config = Default::default();
37 let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check, with_proc_macro }; 37 let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check, with_proc_macro };
38 let (host, _vfs) = load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {})?; 38 let (host, _vfs, _proc_macro) =
39 load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {})?;
39 let db = host.raw_database(); 40 let db = host.raw_database();
40 let analysis = host.analysis(); 41 let analysis = host.analysis();
41 42
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index 23442afac..310c36904 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -23,7 +23,7 @@ pub fn load_workspace_at(
23 cargo_config: &CargoConfig, 23 cargo_config: &CargoConfig,
24 load_config: &LoadCargoConfig, 24 load_config: &LoadCargoConfig,
25 progress: &dyn Fn(String), 25 progress: &dyn Fn(String),
26) -> Result<(AnalysisHost, vfs::Vfs)> { 26) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroClient>)> {
27 let root = AbsPathBuf::assert(std::env::current_dir()?.join(root)); 27 let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
28 let root = ProjectManifest::discover_single(&root)?; 28 let root = ProjectManifest::discover_single(&root)?;
29 let workspace = ProjectWorkspace::load(root, cargo_config, progress)?; 29 let workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
@@ -35,7 +35,7 @@ pub fn load_workspace(
35 ws: ProjectWorkspace, 35 ws: ProjectWorkspace,
36 config: &LoadCargoConfig, 36 config: &LoadCargoConfig,
37 progress: &dyn Fn(String), 37 progress: &dyn Fn(String),
38) -> Result<(AnalysisHost, vfs::Vfs)> { 38) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroClient>)> {
39 let (sender, receiver) = unbounded(); 39 let (sender, receiver) = unbounded();
40 let mut vfs = vfs::Vfs::default(); 40 let mut vfs = vfs::Vfs::default();
41 let mut loader = { 41 let mut loader = {
@@ -80,7 +80,7 @@ pub fn load_workspace(
80 log::debug!("crate graph: {:?}", crate_graph); 80 log::debug!("crate graph: {:?}", crate_graph);
81 let host = 81 let host =
82 load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver); 82 load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
83 Ok((host, vfs)) 83 Ok((host, vfs, proc_macro_client))
84} 84}
85 85
86fn load_crate_graph( 86fn load_crate_graph(
@@ -138,7 +138,8 @@ mod tests {
138 let cargo_config = Default::default(); 138 let cargo_config = Default::default();
139 let load_cargo_config = 139 let load_cargo_config =
140 LoadCargoConfig { load_out_dirs_from_check: false, with_proc_macro: false }; 140 LoadCargoConfig { load_out_dirs_from_check: false, with_proc_macro: false };
141 let (host, _vfs) = load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {})?; 141 let (host, _vfs, _proc_macro) =
142 load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {})?;
142 143
143 let n_crates = Crate::all(host.raw_database()).len(); 144 let n_crates = Crate::all(host.raw_database()).len();
144 // RA has quite a few crates, but the exact count doesn't matter 145 // RA has quite a few crates, but the exact count doesn't matter
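The callers above and below bind the new third tuple element to `_proc_macro` rather than `_`: a named binding keeps the proc-macro client alive for the rest of the scope, while a bare `_` pattern drops it right away. A small standalone illustration of that drop behaviour, with a stand-in guard type:

    struct Client;

    impl Drop for Client {
        fn drop(&mut self) {
            println!("client shut down");
        }
    }

    fn connect() -> (String, Option<Client>) {
        ("host".to_string(), Some(Client))
    }

    fn main() {
        // `_client` keeps the value alive until the end of `main`;
        // a bare `_` here would drop it right after this statement.
        let (_host, _client) = connect();
        println!("doing work while the client is alive");
    }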
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 71a8f8fb9..79f426fff 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -11,7 +11,7 @@ pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
11 let cargo_config = Default::default(); 11 let cargo_config = Default::default();
12 let load_cargo_config = 12 let load_cargo_config =
13 LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: true }; 13 LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: true };
14 let (host, vfs) = 14 let (host, vfs, _proc_macro) =
15 load_workspace_at(&std::env::current_dir()?, &cargo_config, &load_cargo_config, &|_| {})?; 15 load_workspace_at(&std::env::current_dir()?, &cargo_config, &load_cargo_config, &|_| {})?;
16 let db = host.raw_database(); 16 let db = host.raw_database();
17 let mut match_finder = MatchFinder::at_first_file(db)?; 17 let mut match_finder = MatchFinder::at_first_file(db)?;
@@ -38,7 +38,7 @@ pub fn search_for_patterns(patterns: Vec<SsrPattern>, debug_snippet: Option<Stri
38 let cargo_config = Default::default(); 38 let cargo_config = Default::default();
39 let load_cargo_config = 39 let load_cargo_config =
40 LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: true }; 40 LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: true };
41 let (host, _vfs) = 41 let (host, _vfs, _proc_macro) =
42 load_workspace_at(&std::env::current_dir()?, &cargo_config, &load_cargo_config, &|_| {})?; 42 load_workspace_at(&std::env::current_dir()?, &cargo_config, &load_cargo_config, &|_| {})?;
43 let db = host.raw_database(); 43 let db = host.raw_database();
44 let mut match_finder = MatchFinder::at_first_file(db)?; 44 let mut match_finder = MatchFinder::at_first_file(db)?;
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 556fc2eeb..367136702 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -46,8 +46,8 @@ config_data! {
46 cargo_allFeatures: bool = "false", 46 cargo_allFeatures: bool = "false",
47 /// List of features to activate. 47 /// List of features to activate.
48 cargo_features: Vec<String> = "[]", 48 cargo_features: Vec<String> = "[]",
49 /// Run `cargo check` on startup to get the correct value for package 49 /// Run build scripts (`build.rs`) for more precise code analysis.
50 /// OUT_DIRs. 50 cargo_runBuildScripts |
51 cargo_loadOutDirsFromCheck: bool = "false", 51 cargo_loadOutDirsFromCheck: bool = "false",
52 /// Do not activate the `default` feature. 52 /// Do not activate the `default` feature.
53 cargo_noDefaultFeatures: bool = "false", 53 cargo_noDefaultFeatures: bool = "false",
@@ -167,8 +167,7 @@ config_data! {
167 /// Whether to show `can't find Cargo.toml` error message. 167 /// Whether to show `can't find Cargo.toml` error message.
168 notifications_cargoTomlNotFound: bool = "true", 168 notifications_cargoTomlNotFound: bool = "true",
169 169
170 /// Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be 170 /// Enable support for procedural macros, implies `#rust-analyzer.cargo.runBuildScripts#`.
171 /// enabled.
172 procMacro_enable: bool = "false", 171 procMacro_enable: bool = "false",
173 /// Internal config, path to proc-macro server executable (typically, 172 /// Internal config, path to proc-macro server executable (typically,
174 /// this is rust-analyzer itself, but we override this in tests). 173 /// this is rust-analyzer itself, but we override this in tests).
@@ -480,8 +479,8 @@ impl Config {
480 pub fn cargo_autoreload(&self) -> bool { 479 pub fn cargo_autoreload(&self) -> bool {
481 self.data.cargo_autoreload 480 self.data.cargo_autoreload
482 } 481 }
483 pub fn load_out_dirs_from_check(&self) -> bool { 482 pub fn run_build_scripts(&self) -> bool {
484 self.data.cargo_loadOutDirsFromCheck 483 self.data.cargo_runBuildScripts || self.data.procMacro_enable
485 } 484 }
486 pub fn cargo(&self) -> CargoConfig { 485 pub fn cargo(&self) -> CargoConfig {
487 let rustc_source = self.data.rustcSource.as_ref().map(|rustc_src| { 486 let rustc_source = self.data.rustcSource.as_ref().map(|rustc_src| {
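The behavioural core of the config change is the `||` in `run_build_scripts`: enabling proc-macro support now implies running build scripts, while the old `cargo.loadOutDirsFromCheck` key keeps working as an alias of `cargo.runBuildScripts`. A self-contained restatement of that rule (a plain function, not the real `Config` method, which reads the two fields off `Config::data`):

    // Standalone restatement of the new rule for illustration only.
    fn run_build_scripts(cargo_run_build_scripts: bool, proc_macro_enable: bool) -> bool {
        cargo_run_build_scripts || proc_macro_enable
    }

    fn main() {
        assert!(run_build_scripts(true, false));   // explicit opt-in
        assert!(run_build_scripts(false, true));   // procMacro.enable alone is enough
        assert!(!run_build_scripts(false, false)); // both off: build scripts are skipped
    }
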
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs
index 0ed87fe3e..0ad832c0e 100644
--- a/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -29,7 +29,7 @@ fn diagnostic_severity(
29 }, 29 },
30 DiagnosticLevel::Note => lsp_types::DiagnosticSeverity::Information, 30 DiagnosticLevel::Note => lsp_types::DiagnosticSeverity::Information,
31 DiagnosticLevel::Help => lsp_types::DiagnosticSeverity::Hint, 31 DiagnosticLevel::Help => lsp_types::DiagnosticSeverity::Hint,
32 DiagnosticLevel::Unknown => return None, 32 _ => return None,
33 }; 33 };
34 Some(res) 34 Some(res)
35} 35}
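The `_` arm makes the severity mapping tolerant of diagnostic levels it does not recognise, so new variants on the cargo-side enum no longer break the match. A self-contained sketch with simplified, hypothetical types; only the Note, Help, and wildcard arms mirror this hunk, the rest is guessed for illustration:

    // Hypothetical, simplified types; the point is the wildcard arm, which also
    // absorbs any DiagnosticLevel variants added later.
    enum DiagnosticLevel { Error, Warning, Note, Help, Unknown }

    #[derive(Debug, PartialEq)]
    enum Severity { Error, Warning, Information, Hint }

    fn diagnostic_severity(level: DiagnosticLevel) -> Option<Severity> {
        let res = match level {
            DiagnosticLevel::Error => Severity::Error,
            DiagnosticLevel::Warning => Severity::Warning,
            DiagnosticLevel::Note => Severity::Information,
            DiagnosticLevel::Help => Severity::Hint,
            // Covers DiagnosticLevel::Unknown and anything added in the future.
            _ => return None,
        };
        Some(res)
    }

    fn main() {
        assert_eq!(diagnostic_severity(DiagnosticLevel::Note), Some(Severity::Information));
        assert_eq!(diagnostic_severity(DiagnosticLevel::Unknown), None);
    }
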
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 2829d5970..f0cb309e4 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -312,7 +312,7 @@ impl GlobalState {
312 } else { 312 } else {
313 assert_eq!(n_done, n_total); 313 assert_eq!(n_done, n_total);
314 new_status = Status::Ready { 314 new_status = Status::Ready {
315 partial: self.config.load_out_dirs_from_check() 315 partial: self.config.run_build_scripts()
316 && self.workspace_build_data.is_none() 316 && self.workspace_build_data.is_none()
317 || config_version < self.vfs_config_version, 317 || config_version < self.vfs_config_version,
318 }; 318 };
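One subtlety in the `partial` expression above: `&&` binds tighter than `||`, so the status is partial either while build scripts are enabled but their output has not arrived yet, or while the VFS still reflects an older config version. A standalone restatement (hypothetical helper; the real code reads `self.config`, `self.workspace_build_data`, and `self.vfs_config_version`) makes the grouping explicit:

    // Hypothetical restatement of the grouping; parameter types are an assumption.
    fn status_is_partial(
        run_build_scripts: bool,
        have_build_data: bool,
        config_version: u32,
        vfs_config_version: u32,
    ) -> bool {
        (run_build_scripts && !have_build_data) || config_version < vfs_config_version
    }

    fn main() {
        // Build scripts on, no data yet: partial even with an up-to-date config.
        assert!(status_is_partial(true, false, 1, 1));
        // Everything settled: not partial.
        assert!(!status_is_partial(true, true, 1, 1));
    }
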
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index c07efa330..aa8504c3d 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -337,7 +337,7 @@ impl GlobalState {
337 }; 337 };
338 change.set_crate_graph(crate_graph); 338 change.set_crate_graph(crate_graph);
339 339
340 if self.config.load_out_dirs_from_check() && workspace_build_data.is_none() { 340 if self.config.run_build_scripts() && workspace_build_data.is_none() {
341 let mut collector = BuildDataCollector::default(); 341 let mut collector = BuildDataCollector::default();
342 for ws in &workspaces { 342 for ws in &workspaces {
343 ws.collect_build_data_configs(&mut collector); 343 ws.collect_build_data_configs(&mut collector);
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 70cb7fbab..c1ca88df6 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -474,7 +474,7 @@ pub(crate) fn folding_range(
474 let kind = match fold.kind { 474 let kind = match fold.kind {
475 FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), 475 FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
476 FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), 476 FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
477 FoldKind::Mods | FoldKind::Block | FoldKind::ArgList => None, 477 FoldKind::Mods | FoldKind::Block | FoldKind::ArgList | FoldKind::Region => None,
478 }; 478 };
479 479
480 let range = range(line_index, fold.range); 480 let range = range(line_index, fold.range);
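The new `Region` fold kind is forwarded to the client without an LSP folding-range kind for now. A hypothetical variant of the mapping (a local stand-in enum, not the real FoldKind) noting that lsp_types also exposes a dedicated FoldingRangeKind::Region, should a later change want to report it; the patch as written keeps Region in the `None` arm:

    // Local stand-in enum for illustration only; this sketch shows the alternative
    // mapping, not what the patch does.
    enum FoldKind { Comment, Imports, Mods, Block, ArgList, Region }

    fn lsp_kind(kind: FoldKind) -> Option<lsp_types::FoldingRangeKind> {
        match kind {
            FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
            FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
            FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
            FoldKind::Mods | FoldKind::Block | FoldKind::ArgList => None,
        }
    }
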
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index d836c5d1a..9ee3a8586 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
13[dependencies] 13[dependencies]
14itertools = "0.10.0" 14itertools = "0.10.0"
15rowan = "0.12.2" 15rowan = "0.12.2"
16rustc_lexer = { version = "708.0.0", package = "rustc-ap-rustc_lexer" } 16rustc_lexer = { version = "709.0.0", package = "rustc-ap-rustc_lexer" }
17rustc-hash = "1.1.0" 17rustc-hash = "1.1.0"
18arrayvec = "0.5.1" 18arrayvec = "0.5.1"
19once_cell = "1.3.1" 19once_cell = "1.3.1"
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index 824ebf41c..0b3b76d4a 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -595,11 +595,14 @@ impl ops::Add<u8> for IndentLevel {
595 595
596impl IndentLevel { 596impl IndentLevel {
597 pub fn from_node(node: &SyntaxNode) -> IndentLevel { 597 pub fn from_node(node: &SyntaxNode) -> IndentLevel {
598 let first_token = match node.first_token() { 598 match node.first_token() {
599 Some(it) => it, 599 Some(it) => Self::from_token(&it),
600 None => return IndentLevel(0), 600 None => return IndentLevel(0),
601 }; 601 }
602 for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { 602 }
603
604 pub fn from_token(token: &SyntaxToken) -> IndentLevel {
605 for ws in prev_tokens(token.clone()).filter_map(ast::Whitespace::cast) {
603 let text = ws.syntax().text(); 606 let text = ws.syntax().text();
604 if let Some(pos) = text.rfind('\n') { 607 if let Some(pos) = text.rfind('\n') {
605 let level = text[pos + 1..].chars().count() / 4; 608 let level = text[pos + 1..].chars().count() / 4;
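The new `from_token` walks the whitespace tokens before the given token, and the first one containing a newline decides the level: the characters after the last `\n` are counted and divided by four. A self-contained restatement of that per-chunk calculation (a plain function over a string, not the rust-analyzer API; the real loop keeps scanning earlier whitespace tokens when no newline is found):

    // Hypothetical helper mirroring the arithmetic inside the loop body.
    fn indent_level_of_whitespace(ws: &str) -> Option<usize> {
        ws.rfind('\n').map(|pos| ws[pos + 1..].chars().count() / 4)
    }

    fn main() {
        assert_eq!(indent_level_of_whitespace("\n        "), Some(2)); // 8 spaces -> level 2
        assert_eq!(indent_level_of_whitespace("\n    "), Some(1));     // 4 spaces -> level 1
        assert_eq!(indent_level_of_whitespace("  "), None);            // no newline: keep scanning
    }
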
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 044e3e5e8..977eb8181 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -85,8 +85,9 @@ pub enum CommentPlacement {
85} 85}
86 86
87impl CommentKind { 87impl CommentKind {
88 const BY_PREFIX: [(&'static str, CommentKind); 8] = [ 88 const BY_PREFIX: [(&'static str, CommentKind); 9] = [
89 ("/**/", CommentKind { shape: CommentShape::Block, doc: None }), 89 ("/**/", CommentKind { shape: CommentShape::Block, doc: None }),
90 ("/***", CommentKind { shape: CommentShape::Block, doc: None }),
90 ("////", CommentKind { shape: CommentShape::Line, doc: None }), 91 ("////", CommentKind { shape: CommentShape::Line, doc: None }),
91 ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }), 92 ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }),
92 ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }), 93 ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }),