-rw-r--r--  Cargo.lock  47
-rw-r--r--  crates/assists/src/handlers/generate_impl.rs  11
-rw-r--r--  crates/assists/src/handlers/generate_new.rs  8
-rw-r--r--  crates/assists/src/handlers/raw_string.rs  2
-rw-r--r--  crates/assists/src/handlers/replace_derive_with_manual_impl.rs  19
-rw-r--r--  crates/assists/src/utils.rs  2
-rw-r--r--  crates/completion/src/completions/unqualified_path.rs  24
-rw-r--r--  crates/hir/src/code_model.rs  8
-rw-r--r--  crates/hir/src/semantics.rs  2
-rw-r--r--  crates/hir_def/src/find_path.rs  14
-rw-r--r--  crates/hir_def/src/import_map.rs  2
-rw-r--r--  crates/hir_def/src/lang_item.rs  2
-rw-r--r--  crates/hir_def/src/nameres.rs  12
-rw-r--r--  crates/hir_def/src/resolver.rs  25
-rw-r--r--  crates/hir_def/src/test_db.rs  6
-rw-r--r--  crates/hir_expand/src/builtin_derive.rs  2
-rw-r--r--  crates/hir_expand/src/db.rs  2
-rw-r--r--  crates/hir_expand/src/name.rs  8
-rw-r--r--  crates/hir_ty/src/diagnostics.rs  2
-rw-r--r--  crates/hir_ty/src/method_resolution.rs  4
-rw-r--r--  crates/hir_ty/src/test_db.rs  4
-rw-r--r--  crates/ide/src/display/navigation_target.rs  3
-rw-r--r--  crates/ide/src/display/short_label.rs  4
-rw-r--r--  crates/ide/src/extend_selection.rs  4
-rw-r--r--  crates/ide/src/inlay_hints.rs  2
-rw-r--r--  crates/ide/src/join_lines.rs  2
-rw-r--r--  crates/ide/src/runnables.rs  67
-rw-r--r--  crates/ide/src/syntax_highlighting/format.rs  2
-rw-r--r--  crates/ide/src/syntax_highlighting/inject.rs  2
-rw-r--r--  crates/ide_db/src/defs.rs  2
-rw-r--r--  crates/ide_db/src/helpers/insert_use.rs  2
-rw-r--r--  crates/ide_db/src/symbol_index.rs  6
-rw-r--r--  crates/mbe/src/syntax_bridge.rs  6
-rw-r--r--  crates/rust-analyzer/Cargo.toml  2
-rw-r--r--  crates/rust-analyzer/src/bin/args.rs  23
-rw-r--r--  crates/ssr/src/matching.rs  13
-rw-r--r--  crates/ssr/src/replacing.rs  2
-rw-r--r--  crates/ssr/src/resolving.rs  2
-rw-r--r--  crates/syntax/Cargo.toml  5
-rw-r--r--  crates/syntax/src/algo.rs  7
-rw-r--r--  crates/syntax/src/ast.rs  4
-rw-r--r--  crates/syntax/src/ast/make.rs  8
-rw-r--r--  crates/syntax/src/ast/node_ext.rs  10
-rw-r--r--  crates/syntax/src/ast/token_ext.rs  12
-rw-r--r--  crates/syntax/src/lib.rs  4
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs  3
-rw-r--r--  crates/syntax/src/parsing/text_tree_sink.rs  4
-rw-r--r--  crates/syntax/src/syntax_node.rs  4
-rw-r--r--  crates/syntax/src/validation.rs  2
-rw-r--r--  docs/dev/style.md  37
-rw-r--r--  xtask/Cargo.toml  2
-rw-r--r--  xtask/src/main.rs  34
52 files changed, 284 insertions, 202 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 901784bec..88153bdc5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -323,7 +323,7 @@ dependencies = [
  "const_fn",
  "crossbeam-utils 0.8.1",
  "lazy_static",
- "memoffset 0.6.1",
+ "memoffset",
  "scopeguard",
 ]
 
@@ -942,15 +942,6 @@ dependencies = [
 
 [[package]]
 name = "memoffset"
-version = "0.5.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "memoffset"
 version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
@@ -1205,9 +1196,9 @@ dependencies = [
 
 [[package]]
 name = "pico-args"
-version = "0.3.4"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28b9b4df73455c861d7cbf8be42f01d3b373ed7f02e378d55fa84eafc6f638b1"
+checksum = "d70072c20945e1ab871c472a285fc772aefd4f5407723c206242f2c6f94595d6"
 
 [[package]]
 name = "pin-project-lite"
@@ -1384,15 +1375,14 @@ checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
 
 [[package]]
 name = "rowan"
-version = "0.10.6"
+version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a0734142c18710f7214dc21908e2f054e973b908dbb1a602a3e6691615aaaae"
+checksum = "bea4527c692099becd37ec777cfd6949d0534348528d2fc84ee420d2d5fac83d"
 dependencies = [
  "hashbrown",
+ "memoffset",
  "rustc-hash",
- "smol_str",
  "text-size",
- "triomphe",
 ]
 
 [[package]]
@@ -1544,18 +1534,18 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.119"
+version = "1.0.120"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bdd36f49e35b61d49efd8aa7fc068fd295961fd2286d0b2ee9a4c7a14e99cc3"
+checksum = "166b2349061381baf54a58e4b13c89369feb0ef2eaa57198899e2312aac30aab"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.119"
+version = "1.0.120"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "552954ce79a059ddd5fd68c271592374bd15cab2274970380c000118aeffe1cd"
+checksum = "0ca2a8cb5805ce9e3b95435e3765b7b553cecc762d938d409434338386cb5775"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1639,12 +1629,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "stable_deref_trait"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
-
-[[package]]
 name = "stdx"
 version = "0.0.0"
 dependencies = [
@@ -1872,17 +1856,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "triomphe"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e9d872053cf9e5a833d8c1dd772cdc38ab66a908129d6f73c049c986161d07c"
-dependencies = [
- "memoffset 0.5.6",
- "serde",
- "stable_deref_trait",
-]
-
-[[package]]
 name = "tt"
 version = "0.0.0"
 dependencies = [
diff --git a/crates/assists/src/handlers/generate_impl.rs b/crates/assists/src/handlers/generate_impl.rs
index 9af45192b..827477272 100644
--- a/crates/assists/src/handlers/generate_impl.rs
+++ b/crates/assists/src/handlers/generate_impl.rs
@@ -1,6 +1,9 @@
 use itertools::Itertools;
 use stdx::format_to;
-use syntax::ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner};
+use syntax::{
+    ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner},
+    SmolStr,
+};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -49,16 +52,16 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()
                 format_to!(buf, "{}", type_params.syntax());
             }
             buf.push_str(" ");
-            buf.push_str(name.text().as_str());
+            buf.push_str(name.text());
             if let Some(type_params) = type_params {
                 let lifetime_params = type_params
                     .lifetime_params()
                     .filter_map(|it| it.lifetime())
-                    .map(|it| it.text().clone());
+                    .map(|it| SmolStr::from(it.text()));
                 let type_params = type_params
                     .type_params()
                     .filter_map(|it| it.name())
-                    .map(|it| it.text().clone());
+                    .map(|it| SmolStr::from(it.text()));
 
                 let generic_params = lifetime_params.chain(type_params).format(", ");
                 format_to!(buf, "<{}>", generic_params)
diff --git a/crates/assists/src/handlers/generate_new.rs b/crates/assists/src/handlers/generate_new.rs
index 5c52b2bc8..b7390855a 100644
--- a/crates/assists/src/handlers/generate_new.rs
+++ b/crates/assists/src/handlers/generate_new.rs
@@ -3,7 +3,7 @@ use itertools::Itertools;
 use stdx::format_to;
 use syntax::{
     ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
-    T,
+    SmolStr, T,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -95,14 +95,14 @@ fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
         format_to!(buf, "{}", type_params.syntax());
     }
     buf.push_str(" ");
-    buf.push_str(strukt.name().unwrap().text().as_str());
+    buf.push_str(strukt.name().unwrap().text());
    if let Some(type_params) = type_params {
         let lifetime_params = type_params
             .lifetime_params()
             .filter_map(|it| it.lifetime())
-            .map(|it| it.text().clone());
+            .map(|it| SmolStr::from(it.text()));
         let type_params =
-            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
+            type_params.type_params().filter_map(|it| it.name()).map(|it| SmolStr::from(it.text()));
         format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
     }
 
diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs
index be963f162..d95267607 100644
--- a/crates/assists/src/handlers/raw_string.rs
+++ b/crates/assists/src/handlers/raw_string.rs
@@ -138,7 +138,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
         return None;
     }
 
-    let text = token.text().as_str();
+    let text = token.text();
     if !text.starts_with("r#") && text.ends_with('#') {
         return None;
     }
diff --git a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
index bd4c1c806..6aa9d2f2c 100644
--- a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -3,7 +3,7 @@ use ide_db::imports_locator;
 use itertools::Itertools;
 use syntax::{
     ast::{self, make, AstNode},
-    Direction, SmolStr,
+    Direction,
     SyntaxKind::{IDENT, WHITESPACE},
     TextSize,
 };
@@ -43,17 +43,18 @@ pub(crate) fn replace_derive_with_manual_impl(
 ) -> Option<()> {
     let attr = ctx.find_node_at_offset::<ast::Attr>()?;
 
-    let attr_name = attr
+    let has_derive = attr
         .syntax()
         .descendants_with_tokens()
         .filter(|t| t.kind() == IDENT)
         .find_map(syntax::NodeOrToken::into_token)
-        .filter(|t| t.text() == "derive")?
-        .text()
-        .clone();
+        .filter(|t| t.text() == "derive")
+        .is_some();
+    if !has_derive {
+        return None;
+    }
 
-    let trait_token =
-        ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;
+    let trait_token = ctx.token_at_offset().find(|t| t.kind() == IDENT && t.text() != "derive")?;
     let trait_path = make::path_unqualified(make::path_segment(make::name_ref(trait_token.text())));
 
     let annotated_name = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
@@ -176,9 +177,9 @@ fn update_attribute(
         .syntax()
         .descendants_with_tokens()
         .filter(|t| t.kind() == IDENT)
-        .filter_map(|t| t.into_token().map(|t| t.text().clone()))
+        .filter_map(|t| t.into_token().map(|t| t.text().to_string()))
         .filter(|t| t != trait_name.text())
-        .collect::<Vec<SmolStr>>();
+        .collect::<Vec<_>>();
     let has_more_derives = !new_attr_input.is_empty();
 
     if has_more_derives {
diff --git a/crates/assists/src/utils.rs b/crates/assists/src/utils.rs
index fc9f83bab..44c35bafa 100644
--- a/crates/assists/src/utils.rs
+++ b/crates/assists/src/utils.rs
@@ -223,7 +223,7 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
     let method = mce.name_ref()?;
     let arg_list = mce.arg_list()?;
 
-    let method = match method.text().as_str() {
+    let method = match method.text() {
         "is_some" => "is_none",
         "is_none" => "is_some",
         "is_ok" => "is_err",
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index ac5596ca4..809e1645a 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -29,6 +29,10 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
     }
 
     ctx.scope.process_all_names(&mut |name, res| {
+        if let ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) = res {
+            mark::hit!(skip_lifetime_completion);
+            return;
+        }
         if ctx.use_item_syntax.is_some() {
             if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) {
                 if name_ref.syntax().text() == name.to_string().as_str() {
@@ -37,7 +41,7 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
                 }
             }
         }
-        acc.add_resolution(ctx, name.to_string(), &res)
+        acc.add_resolution(ctx, name.to_string(), &res);
     });
 }
 
@@ -234,6 +238,24 @@ fn main() {
                 fn quux() fn quux<T>()
             "#]],
         );
+        check(
+            r#"fn quux<const C: usize>() { $0 }"#,
+            expect![[r#"
+                tp C
+                fn quux() fn quux<const C: usize>()
+            "#]],
+        );
+    }
+
+    #[test]
+    fn does_not_complete_lifetimes() {
+        mark::check!(skip_lifetime_completion);
+        check(
+            r#"fn quux<'a>() { $0 }"#,
+            expect![[r#"
+                fn quux() fn quux<'a>()
+            "#]],
+        );
     }
 
     #[test]
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 5a4c27906..aaa7013b6 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -90,7 +90,7 @@ impl Crate {
     }
 
     pub fn root_module(self, db: &dyn HirDatabase) -> Module {
-        let module_id = db.crate_def_map(self.id).root;
+        let module_id = db.crate_def_map(self.id).root();
         Module::new(self, module_id)
     }
 
@@ -302,7 +302,7 @@ impl Module {
     /// in the module tree of any target in `Cargo.toml`.
     pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
         let def_map = db.crate_def_map(self.id.krate);
-        self.with_module_id(def_map.root)
+        self.with_module_id(def_map.root())
     }
 
     /// Iterates over all child modules.
@@ -1000,7 +1000,7 @@ impl MacroDef {
     /// early, in `hir_expand`, where modules simply do not exist yet.
     pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
         let krate = self.id.krate;
-        let module_id = db.crate_def_map(krate).root;
+        let module_id = db.crate_def_map(krate).root();
         Some(Module::new(Crate { id: krate }, module_id))
     }
 
@@ -2046,7 +2046,7 @@ impl Callable {
 pub enum ScopeDef {
     ModuleDef(ModuleDef),
     MacroDef(MacroDef),
-    GenericParam(TypeParam),
+    GenericParam(GenericParam),
     ImplSelfType(Impl),
     AdtSelfType(Adt),
     Local(Local),
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index ab213e04c..0a30b4f5b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -814,7 +814,7 @@ impl<'a> SemanticsScope<'a> {
                 }
                 resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                 resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
-                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
+                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
                 resolver::ScopeDef::Local(pat_id) => {
                     let parent = resolver.body_owner().unwrap().into();
                     ScopeDef::Local(Local { parent, pat_id })
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index 422a6eeb4..d7b7b9cc0 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -51,7 +51,7 @@ fn check_self_super(def_map: &DefMap, item: ItemInNs, from: ModuleId) -> Option<
     if item == ItemInNs::Types(from.into()) {
         // - if the item is the module we're in, use `self`
         Some(ModPath::from_segments(PathKind::Super(0), Vec::new()))
-    } else if let Some(parent_id) = def_map.modules[from.local_id].parent {
+    } else if let Some(parent_id) = def_map[from.local_id].parent {
         // - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly)
         if item
             == ItemInNs::Types(ModuleDefId::ModuleId(ModuleId {
@@ -111,7 +111,7 @@ fn find_path_inner(
 
     // - if the item is already in scope, return the name under which it is
     let def_map = db.crate_def_map(from.krate);
-    let from_scope: &crate::item_scope::ItemScope = &def_map.modules[from.local_id].scope;
+    let from_scope: &crate::item_scope::ItemScope = &def_map[from.local_id].scope;
     let scope_name =
         if let Some((name, _)) = from_scope.name_of(item) { Some(name.clone()) } else { None };
     if prefixed.is_none() && scope_name.is_some() {
@@ -123,7 +123,7 @@ fn find_path_inner(
     if item
         == ItemInNs::Types(ModuleDefId::ModuleId(ModuleId {
             krate: from.krate,
-            local_id: def_map.root,
+            local_id: def_map.root(),
         }))
     {
         return Some(ModPath::from_segments(PathKind::Crate, Vec::new()));
@@ -147,7 +147,7 @@ fn find_path_inner(
     if let Some(prelude_module) = def_map.prelude {
         let prelude_def_map = db.crate_def_map(prelude_module.krate);
         let prelude_scope: &crate::item_scope::ItemScope =
-            &prelude_def_map.modules[prelude_module.local_id].scope;
+            &prelude_def_map[prelude_module.local_id].scope;
         if let Some((name, vis)) = prelude_scope.name_of(item) {
             if vis.is_visible_from(db, from) {
                 return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()]));
@@ -175,7 +175,7 @@ fn find_path_inner(
 
     // - otherwise, look for modules containing (reexporting) it and import it from one of those
 
-    let crate_root = ModuleId { local_id: def_map.root, krate: from.krate };
+    let crate_root = ModuleId { local_id: def_map.root(), krate: from.krate };
     let crate_attrs = db.attrs(crate_root.into());
     let prefer_no_std = crate_attrs.by_key("no_std").exists();
     let mut best_path = None;
@@ -287,7 +287,7 @@ fn find_local_import_locations(
 
     // Compute the initial worklist. We start with all direct child modules of `from` as well as all
     // of its (recursive) parent modules.
-    let data = &def_map.modules[from.local_id];
+    let data = &def_map[from.local_id];
     let mut worklist = data
         .children
         .values()
@@ -296,7 +296,7 @@ fn find_local_import_locations(
     let mut parent = data.parent;
     while let Some(p) = parent {
         worklist.push(ModuleId { krate: from.krate, local_id: p });
-        parent = def_map.modules[p].parent;
+        parent = def_map[p].parent;
     }
 
     let mut seen: FxHashSet<_> = FxHashSet::default();
diff --git a/crates/hir_def/src/import_map.rs b/crates/hir_def/src/import_map.rs
index fac0de90c..0251d016b 100644
--- a/crates/hir_def/src/import_map.rs
+++ b/crates/hir_def/src/import_map.rs
@@ -75,7 +75,7 @@ impl ImportMap {
 
         // We look only into modules that are public(ly reexported), starting with the crate root.
         let empty = ImportPath { segments: vec![] };
-        let root = ModuleId { krate, local_id: def_map.root };
+        let root = ModuleId { krate, local_id: def_map.root() };
         let mut worklist = vec![(root, empty)];
         while let Some((module, mod_path)) = worklist.pop() {
             let ext_def_map;
diff --git a/crates/hir_def/src/lang_item.rs b/crates/hir_def/src/lang_item.rs
index 30188b740..9e90f745c 100644
--- a/crates/hir_def/src/lang_item.rs
+++ b/crates/hir_def/src/lang_item.rs
@@ -84,7 +84,7 @@ impl LangItems {
 
         let crate_def_map = db.crate_def_map(krate);
 
-        for (_, module_data) in crate_def_map.modules.iter() {
+        for (_, module_data) in crate_def_map.modules() {
             for impl_def in module_data.scope.impls() {
                 lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId)
             }
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs
index 769a557ad..c3d3efc6b 100644
--- a/crates/hir_def/src/nameres.rs
+++ b/crates/hir_def/src/nameres.rs
@@ -75,8 +75,8 @@ use crate::{
 /// Contains all top-level defs from a macro-expanded crate
 #[derive(Debug, PartialEq, Eq)]
 pub struct DefMap {
-    pub root: LocalModuleId,
-    pub modules: Arena<ModuleData>,
+    root: LocalModuleId,
+    modules: Arena<ModuleData>,
     pub(crate) krate: CrateId,
     /// The prelude module for this crate. This either comes from an import
     /// marked with the `prelude_import` attribute, or (in the normal case) from
@@ -208,6 +208,14 @@ impl DefMap {
             .map(|(id, _data)| id)
     }
 
+    pub fn modules(&self) -> impl Iterator<Item = (LocalModuleId, &ModuleData)> + '_ {
+        self.modules.iter()
+    }
+
+    pub fn root(&self) -> LocalModuleId {
+        self.root
+    }
+
     pub(crate) fn resolve_path(
         &self,
         db: &dyn DefDatabase,
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs
index e7e92c72d..a505bf2be 100644
--- a/crates/hir_def/src/resolver.rs
+++ b/crates/hir_def/src/resolver.rs
@@ -21,8 +21,9 @@ use crate::{
     per_ns::PerNs,
     visibility::{RawVisibility, Visibility},
     AdtId, AssocContainerId, ConstId, ConstParamId, ContainerId, DefWithBodyId, EnumId,
-    EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId,
-    ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, VariantId,
+    EnumVariantId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, LifetimeParamId,
+    LocalModuleId, Lookup, ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
+    TypeParamId, VariantId,
 };
 
 #[derive(Debug, Clone, Default)]
@@ -484,7 +485,7 @@ pub enum ScopeDef {
     PerNs(PerNs),
     ImplSelfType(ImplId),
     AdtSelfType(AdtId),
-    GenericParam(TypeParamId),
+    GenericParam(GenericParamId),
     Local(PatId),
 }
 
@@ -527,15 +528,21 @@ impl Scope {
             Scope::LocalItemsScope(body) => body.item_scope.entries().for_each(|(name, def)| {
                 f(name.clone(), ScopeDef::PerNs(def));
             }),
-            Scope::GenericParams { params, def } => {
+            &Scope::GenericParams { ref params, def: parent } => {
                 for (local_id, param) in params.types.iter() {
-                    if let Some(name) = &param.name {
-                        f(
-                            name.clone(),
-                            ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }),
-                        )
+                    if let Some(ref name) = param.name {
+                        let id = TypeParamId { local_id, parent };
+                        f(name.clone(), ScopeDef::GenericParam(id.into()))
                     }
                 }
+                for (local_id, param) in params.consts.iter() {
+                    let id = ConstParamId { local_id, parent };
+                    f(param.name.clone(), ScopeDef::GenericParam(id.into()))
+                }
+                for (local_id, param) in params.lifetimes.iter() {
+                    let id = LifetimeParamId { local_id, parent };
+                    f(param.name.clone(), ScopeDef::GenericParam(id.into()))
+                }
             }
             Scope::ImplDefScope(i) => {
                 f(name![Self], ScopeDef::ImplSelfType(*i));
diff --git a/crates/hir_def/src/test_db.rs b/crates/hir_def/src/test_db.rs
index 574c0201a..4ff219fb7 100644
--- a/crates/hir_def/src/test_db.rs
+++ b/crates/hir_def/src/test_db.rs
@@ -75,7 +75,7 @@ impl TestDB {
     pub(crate) fn module_for_file(&self, file_id: FileId) -> crate::ModuleId {
         for &krate in self.relevant_crates(file_id).iter() {
             let crate_def_map = self.crate_def_map(krate);
-            for (local_id, data) in crate_def_map.modules.iter() {
+            for (local_id, data) in crate_def_map.modules() {
                 if data.origin.file_id() == Some(file_id) {
                     return crate::ModuleId { krate, local_id };
                 }
@@ -110,7 +110,7 @@ impl TestDB {
         let crate_graph = self.crate_graph();
         for krate in crate_graph.iter() {
             let crate_def_map = self.crate_def_map(krate);
-            for (module_id, _) in crate_def_map.modules.iter() {
+            for (module_id, _) in crate_def_map.modules() {
                 let file_id = crate_def_map[module_id].origin.file_id();
                 files.extend(file_id)
             }
@@ -135,7 +135,7 @@ impl TestDB {
         let crate_def_map = self.crate_def_map(krate);
 
         let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
-        for (module_id, module) in crate_def_map.modules.iter() {
+        for (module_id, module) in crate_def_map.modules() {
             crate_def_map.add_diagnostics(self, module_id, &mut sink);
 
             for decl in module.scope.declarations() {
diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs
index eb257579f..b7f1aae8f 100644
--- a/crates/hir_expand/src/builtin_derive.rs
+++ b/crates/hir_expand/src/builtin_derive.rs
@@ -102,7 +102,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
         debug!("name token not found");
         mbe::ExpandError::ConversionError
     })?;
-    let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
+    let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
     let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
     Ok(BasicAdtInfo { name: name_token, type_params })
 }
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 467516eb7..cb6e23320 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -173,7 +173,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
     };
     let loc = db.lookup_intern_macro(id);
     let arg = loc.kind.arg(db)?;
-    Some(arg.green().clone())
+    Some(arg.green().to_owned())
 }
 
 fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index 95d853b6d..d692cec14 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -38,7 +38,7 @@ impl Name {
     }
 
     pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
-        Self::new_text(lt.text().clone())
+        Self::new_text(lt.text().into())
     }
 
     /// Shortcut to create inline plain text name
@@ -47,12 +47,12 @@ impl Name {
     }
 
     /// Resolve a name from the text of token.
-    fn resolve(raw_text: &SmolStr) -> Name {
+    fn resolve(raw_text: &str) -> Name {
         let raw_start = "r#";
-        if raw_text.as_str().starts_with(raw_start) {
+        if raw_text.starts_with(raw_start) {
             Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
         } else {
-            Name::new_text(raw_text.clone())
+            Name::new_text(raw_text.into())
         }
     }
 
diff --git a/crates/hir_ty/src/diagnostics.rs b/crates/hir_ty/src/diagnostics.rs
index c67a289f2..247da43f2 100644
--- a/crates/hir_ty/src/diagnostics.rs
+++ b/crates/hir_ty/src/diagnostics.rs
@@ -409,7 +409,7 @@ mod tests {
         let crate_def_map = self.crate_def_map(krate);
 
         let mut fns = Vec::new();
-        for (module_id, _) in crate_def_map.modules.iter() {
+        for (module_id, _) in crate_def_map.modules() {
             for decl in crate_def_map[module_id].scope.declarations() {
                 let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
                 validate_module_item(self, krate, decl, &mut sink);
diff --git a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs
index 8a289f52a..f06aeeb42 100644
--- a/crates/hir_ty/src/method_resolution.rs
+++ b/crates/hir_ty/src/method_resolution.rs
@@ -112,7 +112,7 @@ impl TraitImpls {
         let mut impls = Self { map: FxHashMap::default() };
 
         let crate_def_map = db.crate_def_map(krate);
-        for (_module_id, module_data) in crate_def_map.modules.iter() {
+        for (_module_id, module_data) in crate_def_map.modules() {
             for impl_id in module_data.scope.impls() {
                 let target_trait = match db.impl_trait(impl_id) {
                     Some(tr) => tr.value.trait_,
@@ -198,7 +198,7 @@ impl InherentImpls {
         let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default();
 
         let crate_def_map = db.crate_def_map(krate);
-        for (_module_id, module_data) in crate_def_map.modules.iter() {
+        for (_module_id, module_data) in crate_def_map.modules() {
             for impl_id in module_data.scope.impls() {
                 let data = db.impl_data(impl_id);
                 if data.target_trait.is_some() {
diff --git a/crates/hir_ty/src/test_db.rs b/crates/hir_ty/src/test_db.rs
index 646e16bbe..3bbcbc242 100644
--- a/crates/hir_ty/src/test_db.rs
+++ b/crates/hir_ty/src/test_db.rs
@@ -81,7 +81,7 @@ impl TestDB {
     pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
         for &krate in self.relevant_crates(file_id).iter() {
             let crate_def_map = self.crate_def_map(krate);
-            for (local_id, data) in crate_def_map.modules.iter() {
+            for (local_id, data) in crate_def_map.modules() {
                 if data.origin.file_id() == Some(file_id) {
                     return ModuleId { krate, local_id };
                 }
@@ -95,7 +95,7 @@ impl TestDB {
         let crate_graph = self.crate_graph();
         for krate in crate_graph.iter() {
             let crate_def_map = self.crate_def_map(krate);
-            for (module_id, _) in crate_def_map.modules.iter() {
+            for (module_id, _) in crate_def_map.modules() {
                 let file_id = crate_def_map[module_id].origin.file_id();
                 files.extend(file_id)
             }
diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs
index 00e601244..671aa1373 100644
--- a/crates/ide/src/display/navigation_target.rs
+++ b/crates/ide/src/display/navigation_target.rs
@@ -153,8 +153,7 @@ impl NavigationTarget {
         node: InFile<&dyn ast::NameOwner>,
         kind: SymbolKind,
     ) -> NavigationTarget {
-        let name =
-            node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
+        let name = node.value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
         let focus_range =
             node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
         let frange = node.map(|it| it.syntax()).original_file_range(db);
diff --git a/crates/ide/src/display/short_label.rs b/crates/ide/src/display/short_label.rs
index 990f740b8..b8e4cc181 100644
--- a/crates/ide/src/display/short_label.rs
+++ b/crates/ide/src/display/short_label.rs
@@ -90,7 +90,7 @@ impl ShortLabel for ast::Variant {
 impl ShortLabel for ast::ConstParam {
     fn short_label(&self) -> Option<String> {
         let mut buf = "const ".to_owned();
-        buf.push_str(self.name()?.text().as_str());
+        buf.push_str(self.name()?.text());
         if let Some(type_ref) = self.ty() {
             format_to!(buf, ": {}", type_ref.syntax());
         }
@@ -117,6 +117,6 @@ where
 {
     let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default();
     buf.push_str(label);
-    buf.push_str(node.name()?.text().as_str());
+    buf.push_str(node.name()?.text());
     Some(buf)
 }
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 17a540972..2d722dee0 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -213,8 +213,8 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange
     let ws_text = ws.text();
     let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
     let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
-    let ws_suffix = &ws_text.as_str()[suffix];
-    let ws_prefix = &ws_text.as_str()[prefix];
+    let ws_suffix = &ws_text[suffix];
+    let ws_prefix = &ws_text[prefix];
     if ws_text.contains('\n') && !ws_suffix.contains('\n') {
         if let Some(node) = ws.next_sibling_or_token() {
             let start = match ws_prefix.rfind('\n') {
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index a2039fcc7..54485fd30 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -411,7 +411,7 @@ fn get_string_representation(expr: &ast::Expr) -> Option<String> {
     match expr {
         ast::Expr::MethodCallExpr(method_call_expr) => {
             let name_ref = method_call_expr.name_ref()?;
-            match name_ref.text().as_str() {
+            match name_ref.text() {
                 "clone" => method_call_expr.receiver().map(|rec| rec.to_string()),
                 name_ref => Some(name_ref.to_owned()),
             }
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 981467c8d..631bde0f1 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -59,7 +59,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextS
         // The node is either the first or the last in the file
         let suff = &token.text()[TextRange::new(
             offset - token.text_range().start() + TextSize::of('\n'),
-            TextSize::of(token.text().as_str()),
+            TextSize::of(token.text()),
         )];
         let spaces = suff.bytes().take_while(|&b| b == b' ').count();
 
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 8976f1080..47a85dc45 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -9,6 +9,7 @@ use syntax::{
     ast::{self, AstNode, AttrsOwner},
     match_ast, SyntaxNode,
 };
+use test_utils::mark;
 
 use crate::{
     display::{ToNav, TryToNav},
@@ -96,28 +97,26 @@ impl Runnable {
 pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
     let sema = Semantics::new(db);
     let module = match sema.to_module_def(file_id) {
-        None => return vec![],
+        None => return Vec::new(),
         Some(it) => it,
     };
 
-    runnables_mod(&sema, module)
+    let mut res = Vec::new();
+    runnables_mod(&sema, &mut res, module);
+    res
 }
 
-fn runnables_mod(sema: &Semantics<RootDatabase>, module: hir::Module) -> Vec<Runnable> {
-    let mut res: Vec<Runnable> = module
-        .declarations(sema.db)
-        .into_iter()
-        .filter_map(|def| {
-            let runnable = match def {
-                hir::ModuleDef::Module(it) => runnable_mod(&sema, it),
-                hir::ModuleDef::Function(it) => runnable_fn(&sema, it),
-                _ => None,
-            };
-            runnable.or_else(|| module_def_doctest(&sema, def))
-        })
-        .collect();
+fn runnables_mod(sema: &Semantics<RootDatabase>, acc: &mut Vec<Runnable>, module: hir::Module) {
+    acc.extend(module.declarations(sema.db).into_iter().filter_map(|def| {
+        let runnable = match def {
+            hir::ModuleDef::Module(it) => runnable_mod(&sema, it),
+            hir::ModuleDef::Function(it) => runnable_fn(&sema, it),
+            _ => None,
+        };
+        runnable.or_else(|| module_def_doctest(&sema, def))
+    }));
 
-    res.extend(module.impl_defs(sema.db).into_iter().flat_map(|it| it.items(sema.db)).filter_map(
+    acc.extend(module.impl_defs(sema.db).into_iter().flat_map(|it| it.items(sema.db)).filter_map(
         |def| match def {
             hir::AssocItem::Function(it) => {
                 runnable_fn(&sema, it).or_else(|| module_def_doctest(&sema, it.into()))
@@ -127,12 +126,14 @@ fn runnables_mod(sema: &Semantics<RootDatabase>, module: hir::Module) -> Vec<Run
         },
     ));
 
-    res.extend(module.declarations(sema.db).into_iter().flat_map(|def| match def {
-        hir::ModuleDef::Module(it) => runnables_mod(sema, it),
-        _ => vec![],
-    }));
-
-    res
+    for def in module.declarations(sema.db) {
+        if let hir::ModuleDef::Module(submodule) = def {
+            match submodule.definition_source(sema.db).value {
+                hir::ModuleSource::Module(_) => runnables_mod(sema, acc, submodule),
+                hir::ModuleSource::SourceFile(_) => mark::hit!(dont_recurse_in_outline_submodules),
+            }
+        }
+    }
 }
 
 pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> Option<Runnable> {
@@ -326,6 +327,7 @@ fn has_test_function_or_multiple_test_submodules(
 #[cfg(test)]
 mod tests {
     use expect_test::{expect, Expect};
+    use test_utils::mark;
 
     use crate::fixture;
 
@@ -1050,4 +1052,25 @@ mod tests {
             "#]],
         );
     }
+
+    #[test]
+    fn dont_recurse_in_outline_submodules() {
+        mark::check!(dont_recurse_in_outline_submodules);
+        check(
+            r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+mod tests {
+    #[test]
+    fn t() {}
+}
+"#,
+            &[],
+            expect![[r#"
+                []
+            "#]],
+        );
+    }
 }
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index a74ca844b..8a9b5ca8c 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -30,7 +30,7 @@ fn is_format_string(string: &ast::String) -> Option<()> {
     let parent = string.syntax().parent();
 
     let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
-    if !matches!(name.text().as_str(), "format_args" | "format_args_nl") {
+    if !matches!(name.text(), "format_args" | "format_args_nl") {
         return None;
     }
 
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 281461493..8cdc3688f 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -116,7 +116,7 @@ pub(super) fn doc_comment(hl: &mut Highlights, node: &SyntaxNode) {
             None => (),
         }
 
-        let line: &str = comment.text().as_str();
+        let line: &str = comment.text();
         let range = comment.syntax().text_range();
 
         let mut pos = TextSize::of(comment.prefix());
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs
index d9875ffef..a8091dbee 100644
--- a/crates/ide_db/src/defs.rs
+++ b/crates/ide_db/src/defs.rs
@@ -343,7 +343,7 @@ impl NameRefClass {
                     hir::AssocItem::TypeAlias(it) => Some(*it),
                     _ => None,
                 })
-                .find(|alias| alias.name(sema.db).to_string() == **name_ref.text())
+                .find(|alias| &alias.name(sema.db).to_string() == name_ref.text())
             {
                 return Some(NameRefClass::Definition(Definition::ModuleDef(
                     ModuleDef::TypeAlias(ty),
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs
index 877d4f1c7..fd4035198 100644
--- a/crates/ide_db/src/helpers/insert_use.rs
+++ b/crates/ide_db/src/helpers/insert_use.rs
@@ -507,7 +507,7 @@ impl ImportGroup {
             PathSegmentKind::SelfKw => ImportGroup::ThisModule,
             PathSegmentKind::SuperKw => ImportGroup::SuperModule,
             PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
-            PathSegmentKind::Name(name) => match name.text().as_str() {
+            PathSegmentKind::Name(name) => match name.text() {
                 "std" => ImportGroup::Std,
                 "core" => ImportGroup::Std,
                 _ => ImportGroup::ExternCrate,
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs
index 0aa6a0765..e954bd72e 100644
--- a/crates/ide_db/src/symbol_index.rs
+++ b/crates/ide_db/src/symbol_index.rs
@@ -191,7 +191,7 @@ pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<Fil
 
     let def_map = db.crate_def_map(krate);
     let mut files = Vec::new();
-    let mut modules = vec![def_map.root];
+    let mut modules = vec![def_map.root()];
     while let Some(module) = modules.pop() {
         let data = &def_map[module];
         files.extend(data.origin.file_id());
@@ -209,7 +209,7 @@ pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<Fil
     query.search(&buf)
 }
 
-pub fn index_resolve(db: &RootDatabase, name: &SmolStr) -> Vec<FileSymbol> {
+pub fn index_resolve(db: &RootDatabase, name: &str) -> Vec<FileSymbol> {
     let mut query = Query::new(name.to_string());
     query.exact();
     query.limit(4);
@@ -409,7 +409,7 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
     fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
         let name = node.name()?;
         let name_range = name.syntax().text_range();
-        let name = name.text().clone();
+        let name = name.text().into();
         let ptr = SyntaxNodePtr::new(node.syntax());
 
         Some((name, ptr, name_range))
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 51002e7b8..0cdc175be 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -507,7 +507,7 @@ impl SrcToken for SynToken {
         }
     }
     fn to_text(&self) -> SmolStr {
-        self.token().text().clone()
+        self.token().text().into()
     }
 }
 
@@ -682,10 +682,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
             self.text_pos += TextSize::of(text);
         }
 
-        let text = SmolStr::new(self.buf.as_str());
+        self.inner.token(kind, self.buf.as_str());
         self.buf.clear();
-        self.inner.token(kind, text);
-
         // Add whitespace between adjoint puncts
         let next = last.bump();
         if let (
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 3cb45b030..268c00942 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -24,7 +24,7 @@ jod-thread = "0.1.0"
 log = "0.4.8"
 lsp-types = { version = "0.86.0", features = ["proposed"] }
 parking_lot = "0.11.0"
-pico-args = "0.3.1"
+pico-args = "0.4.0"
 oorandom = "11.1.2"
 rustc-hash = "1.1.0"
 serde = { version = "1.0.106", features = ["derive"] }
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs
index 0a471154e..7d917946e 100644
--- a/crates/rust-analyzer/src/bin/args.rs
+++ b/crates/rust-analyzer/src/bin/args.rs
@@ -109,7 +109,7 @@ impl Args {
109 let mut matches = Arguments::from_env(); 109 let mut matches = Arguments::from_env();
110 110
111 if matches.contains("--version") { 111 if matches.contains("--version") {
112 matches.finish()?; 112 finish_args(matches)?;
113 return Ok(Args { 113 return Ok(Args {
114 verbosity: Verbosity::Normal, 114 verbosity: Verbosity::Normal,
115 log_file: None, 115 log_file: None,
@@ -143,7 +143,7 @@ impl Args {
143 let subcommand = match matches.subcommand()? { 143 let subcommand = match matches.subcommand()? {
144 Some(it) => it, 144 Some(it) => it,
145 None => { 145 None => {
146 matches.finish()?; 146 finish_args(matches)?;
147 return Ok(Args { verbosity, log_file, command: Command::RunServer }); 147 return Ok(Args { verbosity, log_file, command: Command::RunServer });
148 } 148 }
149 }; 149 };
@@ -160,7 +160,7 @@ impl Args {
160 load_output_dirs: matches.contains("--load-output-dirs"), 160 load_output_dirs: matches.contains("--load-output-dirs"),
161 with_proc_macro: matches.contains("--with-proc-macro"), 161 with_proc_macro: matches.contains("--with-proc-macro"),
162 path: matches 162 path: matches
163 .free_from_str()? 163 .opt_free_from_str()?
164 .ok_or_else(|| format_err!("expected positional argument"))?, 164 .ok_or_else(|| format_err!("expected positional argument"))?,
165 }), 165 }),
166 "analysis-bench" => Command::Bench(BenchCmd { 166 "analysis-bench" => Command::Bench(BenchCmd {
@@ -187,21 +187,21 @@ impl Args {
187 load_output_dirs: matches.contains("--load-output-dirs"), 187 load_output_dirs: matches.contains("--load-output-dirs"),
188 with_proc_macro: matches.contains("--with-proc-macro"), 188 with_proc_macro: matches.contains("--with-proc-macro"),
189 path: matches 189 path: matches
190 .free_from_str()? 190 .opt_free_from_str()?
191 .ok_or_else(|| format_err!("expected positional argument"))?, 191 .ok_or_else(|| format_err!("expected positional argument"))?,
192 }), 192 }),
193 "diagnostics" => Command::Diagnostics { 193 "diagnostics" => Command::Diagnostics {
194 load_output_dirs: matches.contains("--load-output-dirs"), 194 load_output_dirs: matches.contains("--load-output-dirs"),
195 with_proc_macro: matches.contains("--with-proc-macro"), 195 with_proc_macro: matches.contains("--with-proc-macro"),
196 path: matches 196 path: matches
197 .free_from_str()? 197 .opt_free_from_str()?
198 .ok_or_else(|| format_err!("expected positional argument"))?, 198 .ok_or_else(|| format_err!("expected positional argument"))?,
199 }, 199 },
200 "proc-macro" => Command::ProcMacro, 200 "proc-macro" => Command::ProcMacro,
201 "ssr" => Command::Ssr { 201 "ssr" => Command::Ssr {
202 rules: { 202 rules: {
203 let mut acc = Vec::new(); 203 let mut acc = Vec::new();
204 while let Some(rule) = matches.free_from_str()? { 204 while let Some(rule) = matches.opt_free_from_str()? {
205 acc.push(rule); 205 acc.push(rule);
206 } 206 }
207 acc 207 acc
@@ -211,7 +211,7 @@ impl Args {
211 debug_snippet: matches.opt_value_from_str("--debug")?, 211 debug_snippet: matches.opt_value_from_str("--debug")?,
212 patterns: { 212 patterns: {
213 let mut acc = Vec::new(); 213 let mut acc = Vec::new();
214 while let Some(rule) = matches.free_from_str()? { 214 while let Some(rule) = matches.opt_free_from_str()? {
215 acc.push(rule); 215 acc.push(rule);
216 } 216 }
217 acc 217 acc
@@ -222,7 +222,14 @@ impl Args {
222 return Ok(Args { verbosity, log_file: None, command: Command::Help }); 222 return Ok(Args { verbosity, log_file: None, command: Command::Help });
223 } 223 }
224 }; 224 };
225 matches.finish()?; 225 finish_args(matches)?;
226 Ok(Args { verbosity, log_file, command }) 226 Ok(Args { verbosity, log_file, command })
227 } 227 }
228} 228}
229
230fn finish_args(args: Arguments) -> Result<()> {
231 if !args.finish().is_empty() {
232 bail!("Unused arguments.");
233 }
234 Ok(())
235}
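
For readers tracking the pico-args 0.3 → 0.4 migration above: a minimal, self-contained sketch of the new calling convention, assuming `pico-args = "0.4.0"` as declared in Cargo.toml. The binary and its `--verbose` flag are hypothetical; only the pico-args calls mirror the diff.

```rust
// Sketch of the pico-args 0.4 pattern used in args.rs above.
use std::ffi::OsString;

use pico_args::Arguments;

fn main() -> Result<(), pico_args::Error> {
    let mut args = Arguments::from_env();

    // Boolean flags work as before (`--version`, `--load-output-dirs`, ...).
    let verbose = args.contains("--verbose");

    // In 0.4, `free_from_str` errors out when the positional argument is
    // missing, while `opt_free_from_str` keeps the old "maybe present"
    // behaviour used for `path` and the SSR rules above.
    let path: Option<String> = args.opt_free_from_str()?;

    // `finish` now returns the leftover arguments instead of a `Result`,
    // so unused arguments are rejected by hand, as in `finish_args`.
    let rest: Vec<OsString> = args.finish();
    if !rest.is_empty() {
        eprintln!("unused arguments: {:?}", rest);
        std::process::exit(1);
    }

    if verbose {
        eprintln!("path: {:?}", path);
    }
    Ok(())
}
```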
diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs
index 42d313f91..df013bae9 100644
--- a/crates/ssr/src/matching.rs
+++ b/crates/ssr/src/matching.rs
@@ -10,8 +10,11 @@ use hir::Semantics;
10use ide_db::base_db::FileRange; 10use ide_db::base_db::FileRange;
11use rustc_hash::FxHashMap; 11use rustc_hash::FxHashMap;
12use std::{cell::Cell, iter::Peekable}; 12use std::{cell::Cell, iter::Peekable};
13use syntax::ast::{AstNode, AstToken};
14use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; 13use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
14use syntax::{
15 ast::{AstNode, AstToken},
16 SmolStr,
17};
15use test_utils::mark; 18use test_utils::mark;
16 19
17// Creates a match error. If we're currently attempting to match some code that we thought we were 20// Creates a match error. If we're currently attempting to match some code that we thought we were
@@ -398,11 +401,11 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
398 code: &SyntaxNode, 401 code: &SyntaxNode,
399 ) -> Result<(), MatchFailed> { 402 ) -> Result<(), MatchFailed> {
400 // Build a map keyed by field name. 403 // Build a map keyed by field name.
401 let mut fields_by_name = FxHashMap::default(); 404 let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
402 for child in code.children() { 405 for child in code.children() {
403 if let Some(record) = ast::RecordExprField::cast(child.clone()) { 406 if let Some(record) = ast::RecordExprField::cast(child.clone()) {
404 if let Some(name) = record.field_name() { 407 if let Some(name) = record.field_name() {
405 fields_by_name.insert(name.text().clone(), child.clone()); 408 fields_by_name.insert(name.text().into(), child.clone());
406 } 409 }
407 } 410 }
408 } 411 }
@@ -473,9 +476,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
473 } 476 }
474 SyntaxElement::Node(n) => { 477 SyntaxElement::Node(n) => {
475 if let Some(first_token) = n.first_token() { 478 if let Some(first_token) = n.first_token() {
476 if Some(first_token.text().as_str()) 479 if Some(first_token.text()) == next_pattern_token.as_deref() {
477 == next_pattern_token.as_deref()
478 {
479 if let Some(SyntaxElement::Node(p)) = pattern.next() { 480 if let Some(SyntaxElement::Node(p)) = pattern.next() {
480 // We have a subtree that starts with the next token in our pattern. 481 // We have a subtree that starts with the next token in our pattern.
481 self.attempt_match_token_tree(phase, &p, &n)?; 482 self.attempt_match_token_tree(phase, &p, &n)?;
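
With `text()` now returning a plain `&str`, the field map above spells out its `SmolStr` key type and converts at the insertion site with `.into()`. A stand-alone sketch of that pattern, assuming the `rustc_hash` and `smol_str` crates; the function and its inputs are illustrative.

```rust
use rustc_hash::FxHashMap;
use smol_str::SmolStr;

fn index_by_name<'a>(names: impl Iterator<Item = &'a str>) -> FxHashMap<SmolStr, usize> {
    // Explicit key type, as in `fields_by_name` above.
    let mut by_name: FxHashMap<SmolStr, usize> = FxHashMap::default();
    for (idx, name) in names.enumerate() {
        // `&str -> SmolStr` via `Into`, mirroring `name.text().into()`.
        by_name.insert(name.into(), idx);
    }
    by_name
}
```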
diff --git a/crates/ssr/src/replacing.rs b/crates/ssr/src/replacing.rs
index 7e7ce37bd..06a94a46c 100644
--- a/crates/ssr/src/replacing.rs
+++ b/crates/ssr/src/replacing.rs
@@ -173,7 +173,7 @@ impl ReplacementRenderer<'_> {
173 ); 173 );
174 } 174 }
175 } else { 175 } else {
176 self.out.push_str(token.text().as_str()); 176 self.out.push_str(token.text());
177 } 177 }
178 } 178 }
179 179
diff --git a/crates/ssr/src/resolving.rs b/crates/ssr/src/resolving.rs
index f5ceb5729..14e5a3b69 100644
--- a/crates/ssr/src/resolving.rs
+++ b/crates/ssr/src/resolving.rs
@@ -228,7 +228,7 @@ impl<'db> ResolutionScope<'db> {
228 None, 228 None,
229 |_ty, assoc_item| { 229 |_ty, assoc_item| {
230 let item_name = assoc_item.name(self.scope.db)?; 230 let item_name = assoc_item.name(self.scope.db)?;
231 if item_name.to_string().as_str() == name.text().as_str() { 231 if item_name.to_string().as_str() == name.text() {
232 Some(hir::PathResolution::AssocItem(assoc_item)) 232 Some(hir::PathResolution::AssocItem(assoc_item))
233 } else { 233 } else {
234 None 234 None
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 52394b337..55b437a3a 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,15 +12,12 @@ doctest = false
12 12
13[dependencies] 13[dependencies]
14itertools = "0.10.0" 14itertools = "0.10.0"
15rowan = "0.10.3" 15rowan = "0.12"
16rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" } 16rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
17rustc-hash = "1.1.0" 17rustc-hash = "1.1.0"
18arrayvec = "0.5.1" 18arrayvec = "0.5.1"
19once_cell = "1.3.1" 19once_cell = "1.3.1"
20indexmap = "1.4.0" 20indexmap = "1.4.0"
21# This crate transitively depends on `smol_str` via `rowan`.
22# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
23# to reduce number of compilations
24smol_str = { version = "0.1.15", features = ["serde"] } 21smol_str = { version = "0.1.15", features = ["serde"] }
25serde = { version = "1.0.106", features = ["derive"] } 22serde = { version = "1.0.106", features = ["derive"] }
26 23
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 827ae78f9..2ff92f9f6 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -4,6 +4,7 @@ use std::{
4 fmt, 4 fmt,
5 hash::BuildHasherDefault, 5 hash::BuildHasherDefault,
6 ops::{self, RangeInclusive}, 6 ops::{self, RangeInclusive},
7 ptr,
7}; 8};
8 9
9use indexmap::IndexMap; 10use indexmap::IndexMap;
@@ -171,7 +172,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
171 && lhs.text_range().len() == rhs.text_range().len() 172 && lhs.text_range().len() == rhs.text_range().len()
172 && match (&lhs, &rhs) { 173 && match (&lhs, &rhs) {
173 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { 174 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
174 lhs.green() == rhs.green() || lhs.text() == rhs.text() 175 ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
175 } 176 }
176 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), 177 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
177 _ => false, 178 _ => false,
@@ -566,7 +567,7 @@ impl<'a> SyntaxRewriter<'a> {
566 567
567fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { 568fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
568 match element { 569 match element {
569 NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()), 570 NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
570 NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), 571 NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
571 } 572 }
572} 573}
@@ -624,7 +625,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
624 625
625fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { 626fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
626 match element { 627 match element {
627 NodeOrToken::Node(it) => it.green().clone().into(), 628 NodeOrToken::Node(it) => it.green().to_owned().into(),
628 NodeOrToken::Token(it) => it.green().clone().into(), 629 NodeOrToken::Token(it) => it.green().clone().into(),
629 } 630 }
630} 631}
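
The `diff` hunk above replaces a green-node equality with `ptr::eq`: a cheap identity check first, falling back to the more expensive text comparison only when the two references differ. A generic, std-only sketch of that pattern; `Node` here is a stand-in struct, not a rowan type.

```rust
use std::ptr;

struct Node {
    text: String,
}

fn same_content(lhs: &Node, rhs: &Node) -> bool {
    // Same allocation => trivially equal; otherwise compare the contents.
    ptr::eq(lhs, rhs) || lhs.text == rhs.text
}

fn main() {
    let a = Node { text: "fn foo() {}".to_string() };
    let b = Node { text: "fn foo() {}".to_string() };
    assert!(same_content(&a, &a)); // identity short-circuits
    assert!(same_content(&a, &b)); // distinct nodes, equal text
}
```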
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
12 12
13use crate::{ 13use crate::{
14 syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken}, 14 syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
15 SmolStr, SyntaxKind, 15 SyntaxKind,
16}; 16};
17 17
18pub use self::{ 18pub use self::{
@@ -54,7 +54,7 @@ pub trait AstToken {
54 54
55 fn syntax(&self) -> &SyntaxToken; 55 fn syntax(&self) -> &SyntaxToken;
56 56
57 fn text(&self) -> &SmolStr { 57 fn text(&self) -> &str {
58 self.syntax().text() 58 self.syntax().text()
59 } 59 }
60} 60}
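
A hedged sketch of the calling convention after `AstToken::text` returns `&str`: call sites compare or slice the text directly and make an owned copy only where one is actually needed. The helper and its policy are illustrative; only the `&str`/`SmolStr` handling mirrors the hunks that follow.

```rust
use smol_str::SmolStr;

fn significant_text(text: &str) -> Option<SmolStr> {
    // Whitespace filtering in the spirit of the `make::tokens` hunks below.
    if text == " " || text == "\n" || text == "\n\n" {
        return None;
    }
    // Where a cheaply clonable owned copy is still wanted, convert explicitly.
    Some(SmolStr::new(text))
}
```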
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 9ffc3ae11..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -478,7 +478,7 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
478} 478}
479 479
480fn unroot(n: SyntaxNode) -> SyntaxNode { 480fn unroot(n: SyntaxNode) -> SyntaxNode {
481 SyntaxNode::new_root(n.green().clone()) 481 SyntaxNode::new_root(n.green().to_owned())
482} 482}
483 483
484pub mod tokens { 484pub mod tokens {
@@ -495,7 +495,7 @@ pub mod tokens {
495 .syntax() 495 .syntax()
496 .descendants_with_tokens() 496 .descendants_with_tokens()
497 .filter_map(|it| it.into_token()) 497 .filter_map(|it| it.into_token())
498 .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ") 498 .find(|it| it.kind() == WHITESPACE && it.text() == " ")
499 .unwrap() 499 .unwrap()
500 } 500 }
501 501
@@ -523,7 +523,7 @@ pub mod tokens {
523 .syntax() 523 .syntax()
524 .descendants_with_tokens() 524 .descendants_with_tokens()
525 .filter_map(|it| it.into_token()) 525 .filter_map(|it| it.into_token())
526 .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n") 526 .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
527 .unwrap() 527 .unwrap()
528 } 528 }
529 529
@@ -533,7 +533,7 @@ pub mod tokens {
533 .syntax() 533 .syntax()
534 .descendants_with_tokens() 534 .descendants_with_tokens()
535 .filter_map(|it| it.into_token()) 535 .filter_map(|it| it.into_token())
536 .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n") 536 .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
537 .unwrap() 537 .unwrap()
538 } 538 }
539 539
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
13}; 13};
14 14
15impl ast::Lifetime { 15impl ast::Lifetime {
16 pub fn text(&self) -> &SmolStr { 16 pub fn text(&self) -> &str {
17 text_of_first_token(self.syntax()) 17 text_of_first_token(self.syntax())
18 } 18 }
19} 19}
20 20
21impl ast::Name { 21impl ast::Name {
22 pub fn text(&self) -> &SmolStr { 22 pub fn text(&self) -> &str {
23 text_of_first_token(self.syntax()) 23 text_of_first_token(self.syntax())
24 } 24 }
25} 25}
26 26
27impl ast::NameRef { 27impl ast::NameRef {
28 pub fn text(&self) -> &SmolStr { 28 pub fn text(&self) -> &str {
29 text_of_first_token(self.syntax()) 29 text_of_first_token(self.syntax())
30 } 30 }
31 31
@@ -34,7 +34,7 @@ impl ast::NameRef {
34 } 34 }
35} 35}
36 36
37fn text_of_first_token(node: &SyntaxNode) -> &SmolStr { 37fn text_of_first_token(node: &SyntaxNode) -> &str {
38 node.green().children().next().and_then(|it| it.into_token()).unwrap().text() 38 node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
39} 39}
40 40
@@ -121,7 +121,7 @@ impl ast::Attr {
121 pub fn simple_name(&self) -> Option<SmolStr> { 121 pub fn simple_name(&self) -> Option<SmolStr> {
122 let path = self.path()?; 122 let path = self.path()?;
123 match (path.segment(), path.qualifier()) { 123 match (path.segment(), path.qualifier()) {
124 (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()), 124 (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
125 _ => None, 125 _ => None,
126 } 126 }
127 } 127 }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
41 match kind { 41 match kind {
42 CommentKind { shape, doc: Some(_) } => { 42 CommentKind { shape, doc: Some(_) } => {
43 let prefix = kind.prefix(); 43 let prefix = kind.prefix();
44 let text = &self.text().as_str()[prefix.len()..]; 44 let text = &self.text()[prefix.len()..];
45 let ws = text.chars().next().filter(|c| c.is_whitespace()); 45 let ws = text.chars().next().filter(|c| c.is_whitespace());
46 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]); 46 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
47 match shape { 47 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
156 156
157 pub fn value(&self) -> Option<Cow<'_, str>> { 157 pub fn value(&self) -> Option<Cow<'_, str>> {
158 if self.is_raw() { 158 if self.is_raw() {
159 let text = self.text().as_str(); 159 let text = self.text();
160 let text = 160 let text =
161 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 161 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
162 return Some(Cow::Borrowed(text)); 162 return Some(Cow::Borrowed(text));
163 } 163 }
164 164
165 let text = self.text().as_str(); 165 let text = self.text();
166 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 166 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
167 167
168 let mut buf = String::new(); 168 let mut buf = String::new();
@@ -190,7 +190,7 @@ impl ast::String {
190 } 190 }
191 191
192 pub fn quote_offsets(&self) -> Option<QuoteOffsets> { 192 pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
193 let text = self.text().as_str(); 193 let text = self.text();
194 let offsets = QuoteOffsets::new(text)?; 194 let offsets = QuoteOffsets::new(text)?;
195 let o = self.syntax().text_range().start(); 195 let o = self.syntax().text_range().start();
196 let offsets = QuoteOffsets { 196 let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
560 fn char_ranges( 560 fn char_ranges(
561 &self, 561 &self,
562 ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> { 562 ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
563 let text = self.text().as_str(); 563 let text = self.text();
564 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 564 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
565 let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start(); 565 let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
566 566
@@ -590,7 +590,7 @@ impl ast::IntNumber {
590 pub fn value(&self) -> Option<u128> { 590 pub fn value(&self) -> Option<u128> {
591 let token = self.syntax(); 591 let token = self.syntax();
592 592
593 let mut text = token.text().as_str(); 593 let mut text = token.text();
594 if let Some(suffix) = self.suffix() { 594 if let Some(suffix) = self.suffix() {
595 text = &text[..text.len() - suffix.len()] 595 text = &text[..text.len() - suffix.len()]
596 } 596 }
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index ea7482bb1..11294c5b2 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -56,9 +56,9 @@ pub use crate::{
56}; 56};
57pub use parser::{SyntaxKind, T}; 57pub use parser::{SyntaxKind, T};
58pub use rowan::{ 58pub use rowan::{
59 Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, 59 Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
60 WalkEvent,
61}; 60};
61pub use smol_str::SmolStr;
62 62
63/// `Parse` is the result of the parsing: a syntax tree and a collection of 63/// `Parse` is the result of the parsing: a syntax tree and a collection of
64/// errors. 64/// errors.
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 76f01084c..3d637bf91 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -73,8 +73,7 @@ fn reparse_token<'node>(
73 new_text.pop(); 73 new_text.pop();
74 } 74 }
75 75
76 let new_token = 76 let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
77 GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
78 Some(( 77 Some((
79 prev_token.replace_with(new_token), 78 prev_token.replace_with(new_token),
80 new_err.into_iter().collect(), 79 new_err.into_iter().collect(),
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index ce27c3dd9..d5ddc076f 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -8,7 +8,7 @@ use crate::{
8 ast, 8 ast,
9 parsing::Token, 9 parsing::Token,
10 syntax_node::GreenNode, 10 syntax_node::GreenNode,
11 SmolStr, SyntaxError, 11 SyntaxError,
12 SyntaxKind::{self, *}, 12 SyntaxKind::{self, *},
13 SyntaxTreeBuilder, TextRange, TextSize, 13 SyntaxTreeBuilder, TextRange, TextSize,
14}; 14};
@@ -135,7 +135,7 @@ impl<'a> TextTreeSink<'a> {
135 135
136 fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) { 136 fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
137 let range = TextRange::at(self.text_pos, len); 137 let range = TextRange::at(self.text_pos, len);
138 let text: SmolStr = self.text[range].into(); 138 let text = &self.text[range];
139 self.text_pos += len; 139 self.text_pos += len;
140 self.token_pos += n_tokens; 140 self.token_pos += n_tokens;
141 self.inner.token(kind, text); 141 self.inner.token(kind, text);
diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs
index cc30138fa..8f643b228 100644
--- a/crates/syntax/src/syntax_node.rs
+++ b/crates/syntax/src/syntax_node.rs
@@ -8,7 +8,7 @@
8 8
9use rowan::{GreenNodeBuilder, Language}; 9use rowan::{GreenNodeBuilder, Language};
10 10
11use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize}; 11use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
12 12
13pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken}; 13pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
14 14
@@ -53,7 +53,7 @@ impl SyntaxTreeBuilder {
53 Parse::new(green, errors) 53 Parse::new(green, errors)
54 } 54 }
55 55
56 pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { 56 pub fn token(&mut self, kind: SyntaxKind, text: &str) {
57 let kind = RustLanguage::kind_to_raw(kind); 57 let kind = RustLanguage::kind_to_raw(kind);
58 self.inner.token(kind, text) 58 self.inner.token(kind, text)
59 } 59 }
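
`SyntaxTreeBuilder::token` now borrows its text, which is what lets `text_tree_sink` pass `&self.text[range]` straight through instead of allocating a `SmolStr` per token. A minimal sketch of the same idea with a stand-in sink, not the real rowan-backed builder.

```rust
struct TokenSink {
    tokens: Vec<(u16, String)>,
}

impl TokenSink {
    fn token(&mut self, kind: u16, text: &str) {
        // Any copying or interning now happens in one place, behind the call.
        self.tokens.push((kind, text.to_string()));
    }
}

fn feed(sink: &mut TokenSink, source: &str, spans: &[(usize, usize, u16)]) {
    for &(start, end, kind) in spans {
        // Pass a borrowed slice of the source text; no per-token allocation here.
        sink.token(kind, &source[start..end]);
    }
}
```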
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
116 } 116 }
117 117
118 let token = literal.token(); 118 let token = literal.token();
119 let text = token.text().as_str(); 119 let text = token.text();
120 120
121 // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205) 121 // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
122 let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| { 122 let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
diff --git a/docs/dev/style.md b/docs/dev/style.md
index 21330948b..389649398 100644
--- a/docs/dev/style.md
+++ b/docs/dev/style.md
@@ -280,6 +280,9 @@ Prefer `Default` even it has to be implemented manually.
280 280
281**Rationale:** less typing in the common case, uniformity. 281**Rationale:** less typing in the common case, uniformity.
282 282
283Use `Vec::new` rather than `vec![]`. **Rationale:** uniformity, strength
284reduction.
285
283## Functions Over Objects 286## Functions Over Objects
284 287
285Avoid creating "doer" objects. 288Avoid creating "doer" objects.
@@ -418,12 +421,44 @@ fn frobnicate(s: &str) {
418**Rationale:** reveals the costs. 421**Rationale:** reveals the costs.
419It is also more efficient when the caller already owns the allocation. 422It is also more efficient when the caller already owns the allocation.
420 423
421## Collection types 424## Collection Types
422 425
423Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`. 426Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`.
424 427
425**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount. 428**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount.
426 429
430## Avoid Intermediate Collections
431
432When writing a recursive function to compute a set of things, use an accumulator parameter instead of returning a fresh collection.
433The accumulator goes first in the list of arguments.
434
435```rust
436// GOOD
437pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
438 let mut res = FxHashSet::default();
439 go(&mut res, node);
440 res
441}
442fn go(acc: &mut FxHashSet<Node>, node: Node) {
443 acc.insert(node);
444 for n in node.neighbors() {
445 go(acc, n);
446 }
447}
448
449// BAD
450pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
451 let mut res = FxHashSet::default();
452 res.insert(node);
453 for n in node.neighbors() {
454 res.extend(reachable_nodes(n));
455 }
456 res
457}
458```
459
460**Rationale:** re-uses allocations; the accumulator style is also more concise for complex cases.
461
427# Style 462# Style
428 463
429## Order of Imports 464## Order of Imports
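
A companion illustration for the `Vec::new` guideline added above, in the style guide's own GOOD/BAD convention; the helper functions are made up for the example.

```rust
// GOOD
fn positions_good(haystack: &str, needle: char) -> Vec<usize> {
    let mut acc = Vec::new();
    for (i, c) in haystack.char_indices() {
        if c == needle {
            acc.push(i);
        }
    }
    acc
}

// BAD
fn positions_bad(haystack: &str, needle: char) -> Vec<usize> {
    let mut acc = vec![];
    for (i, c) in haystack.char_indices() {
        if c == needle {
            acc.push(i);
        }
    }
    acc
}
```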
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 4abc7b053..4e6b439fd 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
12[dependencies] 12[dependencies]
13anyhow = "1.0.26" 13anyhow = "1.0.26"
14flate2 = "1.0" 14flate2 = "1.0"
15pico-args = "0.3.1" 15pico-args = "0.4.0"
16proc-macro2 = "1.0.8" 16proc-macro2 = "1.0.8"
17quote = "1.0.2" 17quote = "1.0.2"
18ungrammar = "1.9" 18ungrammar = "1.9"
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index c3e5c7326..5a99f4a76 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -10,6 +10,7 @@
10 10
11use std::env; 11use std::env;
12 12
13use anyhow::bail;
13use codegen::CodegenCmd; 14use codegen::CodegenCmd;
14use pico_args::Arguments; 15use pico_args::Arguments;
15use xshell::{cmd, cp, pushd}; 16use xshell::{cmd, cp, pushd};
@@ -76,7 +77,7 @@ FLAGS:
76 77
77 let client_opt = args.opt_value_from_str("--client")?; 78 let client_opt = args.opt_value_from_str("--client")?;
78 79
79 args.finish()?; 80 finish_args(args)?;
80 81
81 InstallCmd { 82 InstallCmd {
82 client: if server { None } else { Some(client_opt.unwrap_or_default()) }, 83 client: if server { None } else { Some(client_opt.unwrap_or_default()) },
@@ -86,53 +87,53 @@ FLAGS:
86 } 87 }
87 "codegen" => { 88 "codegen" => {
88 let features = args.contains("--features"); 89 let features = args.contains("--features");
89 args.finish()?; 90 finish_args(args)?;
90 CodegenCmd { features }.run() 91 CodegenCmd { features }.run()
91 } 92 }
92 "format" => { 93 "format" => {
93 args.finish()?; 94 finish_args(args)?;
94 run_rustfmt(Mode::Overwrite) 95 run_rustfmt(Mode::Overwrite)
95 } 96 }
96 "install-pre-commit-hook" => { 97 "install-pre-commit-hook" => {
97 args.finish()?; 98 finish_args(args)?;
98 pre_commit::install_hook() 99 pre_commit::install_hook()
99 } 100 }
100 "lint" => { 101 "lint" => {
101 args.finish()?; 102 finish_args(args)?;
102 run_clippy() 103 run_clippy()
103 } 104 }
104 "fuzz-tests" => { 105 "fuzz-tests" => {
105 args.finish()?; 106 finish_args(args)?;
106 run_fuzzer() 107 run_fuzzer()
107 } 108 }
108 "pre-cache" => { 109 "pre-cache" => {
109 args.finish()?; 110 finish_args(args)?;
110 PreCacheCmd.run() 111 PreCacheCmd.run()
111 } 112 }
112 "release" => { 113 "release" => {
113 let dry_run = args.contains("--dry-run"); 114 let dry_run = args.contains("--dry-run");
114 args.finish()?; 115 finish_args(args)?;
115 ReleaseCmd { dry_run }.run() 116 ReleaseCmd { dry_run }.run()
116 } 117 }
117 "promote" => { 118 "promote" => {
118 let dry_run = args.contains("--dry-run"); 119 let dry_run = args.contains("--dry-run");
119 args.finish()?; 120 finish_args(args)?;
120 PromoteCmd { dry_run }.run() 121 PromoteCmd { dry_run }.run()
121 } 122 }
122 "dist" => { 123 "dist" => {
123 let nightly = args.contains("--nightly"); 124 let nightly = args.contains("--nightly");
124 let client_version: Option<String> = args.opt_value_from_str("--client")?; 125 let client_version: Option<String> = args.opt_value_from_str("--client")?;
125 args.finish()?; 126 finish_args(args)?;
126 DistCmd { nightly, client_version }.run() 127 DistCmd { nightly, client_version }.run()
127 } 128 }
128 "metrics" => { 129 "metrics" => {
129 let dry_run = args.contains("--dry-run"); 130 let dry_run = args.contains("--dry-run");
130 args.finish()?; 131 finish_args(args)?;
131 MetricsCmd { dry_run }.run() 132 MetricsCmd { dry_run }.run()
132 } 133 }
133 "bb" => { 134 "bb" => {
134 let suffix: String = args.free_from_str()?.unwrap(); 135 let suffix: String = args.free_from_str()?;
135 args.finish()?; 136 finish_args(args)?;
136 cmd!("cargo build --release").run()?; 137 cmd!("cargo build --release").run()?;
137 cp("./target/release/rust-analyzer", format!("./target/rust-analyzer-{}", suffix))?; 138 cp("./target/release/rust-analyzer", format!("./target/rust-analyzer-{}", suffix))?;
138 Ok(()) 139 Ok(())
@@ -161,3 +162,10 @@ SUBCOMMANDS:
161 } 162 }
162 } 163 }
163} 164}
165
166fn finish_args(args: Arguments) -> Result<()> {
167 if !args.finish().is_empty() {
168 bail!("Unused arguments.");
169 }
170 Ok(())
171}