-rw-r--r--  Cargo.lock  35
-rw-r--r--  crates/assists/src/handlers/generate_impl.rs  11
-rw-r--r--  crates/assists/src/handlers/generate_new.rs  8
-rw-r--r--  crates/assists/src/handlers/raw_string.rs  2
-rw-r--r--  crates/assists/src/handlers/replace_derive_with_manual_impl.rs  19
-rw-r--r--  crates/assists/src/utils.rs  2
-rw-r--r--  crates/completion/src/completions/unqualified_path.rs  24
-rw-r--r--  crates/hir/src/code_model.rs  2
-rw-r--r--  crates/hir/src/semantics.rs  2
-rw-r--r--  crates/hir_def/src/resolver.rs  25
-rw-r--r--  crates/hir_expand/src/builtin_derive.rs  2
-rw-r--r--  crates/hir_expand/src/db.rs  2
-rw-r--r--  crates/hir_expand/src/name.rs  8
-rw-r--r--  crates/ide/src/display/navigation_target.rs  3
-rw-r--r--  crates/ide/src/display/short_label.rs  4
-rw-r--r--  crates/ide/src/extend_selection.rs  4
-rw-r--r--  crates/ide/src/inlay_hints.rs  2
-rw-r--r--  crates/ide/src/join_lines.rs  2
-rw-r--r--  crates/ide/src/runnables.rs  67
-rw-r--r--  crates/ide/src/syntax_highlighting/format.rs  2
-rw-r--r--  crates/ide/src/syntax_highlighting/inject.rs  2
-rw-r--r--  crates/ide_db/src/defs.rs  2
-rw-r--r--  crates/ide_db/src/helpers/insert_use.rs  2
-rw-r--r--  crates/ide_db/src/symbol_index.rs  4
-rw-r--r--  crates/mbe/src/syntax_bridge.rs  6
-rw-r--r--  crates/ssr/src/matching.rs  13
-rw-r--r--  crates/ssr/src/replacing.rs  2
-rw-r--r--  crates/ssr/src/resolving.rs  2
-rw-r--r--  crates/syntax/Cargo.toml  5
-rw-r--r--  crates/syntax/src/algo.rs  7
-rw-r--r--  crates/syntax/src/ast.rs  4
-rw-r--r--  crates/syntax/src/ast/make.rs  8
-rw-r--r--  crates/syntax/src/ast/node_ext.rs  10
-rw-r--r--  crates/syntax/src/ast/token_ext.rs  12
-rw-r--r--  crates/syntax/src/lib.rs  4
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs  3
-rw-r--r--  crates/syntax/src/parsing/text_tree_sink.rs  4
-rw-r--r--  crates/syntax/src/syntax_node.rs  4
-rw-r--r--  crates/syntax/src/validation.rs  2
-rw-r--r--  docs/dev/style.md  37
40 files changed, 209 insertions, 150 deletions
diff --git a/Cargo.lock b/Cargo.lock
index b06f32913..4a5853a61 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -323,7 +323,7 @@ dependencies = [
323 "const_fn", 323 "const_fn",
324 "crossbeam-utils 0.8.1", 324 "crossbeam-utils 0.8.1",
325 "lazy_static", 325 "lazy_static",
326 "memoffset 0.6.1", 326 "memoffset",
327 "scopeguard", 327 "scopeguard",
328] 328]
329 329
@@ -942,15 +942,6 @@ dependencies = [
942 942
943[[package]] 943[[package]]
944name = "memoffset" 944name = "memoffset"
945version = "0.5.6"
946source = "registry+https://github.com/rust-lang/crates.io-index"
947checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa"
948dependencies = [
949 "autocfg",
950]
951
952[[package]]
953name = "memoffset"
954version = "0.6.1" 945version = "0.6.1"
955source = "registry+https://github.com/rust-lang/crates.io-index" 946source = "registry+https://github.com/rust-lang/crates.io-index"
956checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" 947checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
@@ -1384,15 +1375,14 @@ checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
1384 1375
1385[[package]] 1376[[package]]
1386name = "rowan" 1377name = "rowan"
1387version = "0.10.6" 1378version = "0.12.0"
1388source = "registry+https://github.com/rust-lang/crates.io-index" 1379source = "registry+https://github.com/rust-lang/crates.io-index"
1389checksum = "8a0734142c18710f7214dc21908e2f054e973b908dbb1a602a3e6691615aaaae" 1380checksum = "bea4527c692099becd37ec777cfd6949d0534348528d2fc84ee420d2d5fac83d"
1390dependencies = [ 1381dependencies = [
1391 "hashbrown", 1382 "hashbrown",
1383 "memoffset",
1392 "rustc-hash", 1384 "rustc-hash",
1393 "smol_str",
1394 "text-size", 1385 "text-size",
1395 "triomphe",
1396] 1386]
1397 1387
1398[[package]] 1388[[package]]
@@ -1639,12 +1629,6 @@ dependencies = [
1639] 1629]
1640 1630
1641[[package]] 1631[[package]]
1642name = "stable_deref_trait"
1643version = "1.2.0"
1644source = "registry+https://github.com/rust-lang/crates.io-index"
1645checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
1646
1647[[package]]
1648name = "stdx" 1632name = "stdx"
1649version = "0.0.0" 1633version = "0.0.0"
1650dependencies = [ 1634dependencies = [
@@ -1872,17 +1856,6 @@ dependencies = [
1872] 1856]
1873 1857
1874[[package]] 1858[[package]]
1875name = "triomphe"
1876version = "0.1.2"
1877source = "registry+https://github.com/rust-lang/crates.io-index"
1878checksum = "6e9d872053cf9e5a833d8c1dd772cdc38ab66a908129d6f73c049c986161d07c"
1879dependencies = [
1880 "memoffset 0.5.6",
1881 "serde",
1882 "stable_deref_trait",
1883]
1884
1885[[package]]
1886name = "tt" 1859name = "tt"
1887version = "0.0.0" 1860version = "0.0.0"
1888dependencies = [ 1861dependencies = [
diff --git a/crates/assists/src/handlers/generate_impl.rs b/crates/assists/src/handlers/generate_impl.rs
index 9af45192b..827477272 100644
--- a/crates/assists/src/handlers/generate_impl.rs
+++ b/crates/assists/src/handlers/generate_impl.rs
@@ -1,6 +1,9 @@
1use itertools::Itertools; 1use itertools::Itertools;
2use stdx::format_to; 2use stdx::format_to;
3use syntax::ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner}; 3use syntax::{
4 ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner},
5 SmolStr,
6};
4 7
5use crate::{AssistContext, AssistId, AssistKind, Assists}; 8use crate::{AssistContext, AssistId, AssistKind, Assists};
6 9
@@ -49,16 +52,16 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()
49 format_to!(buf, "{}", type_params.syntax()); 52 format_to!(buf, "{}", type_params.syntax());
50 } 53 }
51 buf.push_str(" "); 54 buf.push_str(" ");
52 buf.push_str(name.text().as_str()); 55 buf.push_str(name.text());
53 if let Some(type_params) = type_params { 56 if let Some(type_params) = type_params {
54 let lifetime_params = type_params 57 let lifetime_params = type_params
55 .lifetime_params() 58 .lifetime_params()
56 .filter_map(|it| it.lifetime()) 59 .filter_map(|it| it.lifetime())
57 .map(|it| it.text().clone()); 60 .map(|it| SmolStr::from(it.text()));
58 let type_params = type_params 61 let type_params = type_params
59 .type_params() 62 .type_params()
60 .filter_map(|it| it.name()) 63 .filter_map(|it| it.name())
61 .map(|it| it.text().clone()); 64 .map(|it| SmolStr::from(it.text()));
62 65
63 let generic_params = lifetime_params.chain(type_params).format(", "); 66 let generic_params = lifetime_params.chain(type_params).format(", ");
64 format_to!(buf, "<{}>", generic_params) 67 format_to!(buf, "<{}>", generic_params)
diff --git a/crates/assists/src/handlers/generate_new.rs b/crates/assists/src/handlers/generate_new.rs
index 5c52b2bc8..b7390855a 100644
--- a/crates/assists/src/handlers/generate_new.rs
+++ b/crates/assists/src/handlers/generate_new.rs
@@ -3,7 +3,7 @@ use itertools::Itertools;
3use stdx::format_to; 3use stdx::format_to;
4use syntax::{ 4use syntax::{
5 ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner}, 5 ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
6 T, 6 SmolStr, T,
7}; 7};
8 8
9use crate::{AssistContext, AssistId, AssistKind, Assists}; 9use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -95,14 +95,14 @@ fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
95 format_to!(buf, "{}", type_params.syntax()); 95 format_to!(buf, "{}", type_params.syntax());
96 } 96 }
97 buf.push_str(" "); 97 buf.push_str(" ");
98 buf.push_str(strukt.name().unwrap().text().as_str()); 98 buf.push_str(strukt.name().unwrap().text());
99 if let Some(type_params) = type_params { 99 if let Some(type_params) = type_params {
100 let lifetime_params = type_params 100 let lifetime_params = type_params
101 .lifetime_params() 101 .lifetime_params()
102 .filter_map(|it| it.lifetime()) 102 .filter_map(|it| it.lifetime())
103 .map(|it| it.text().clone()); 103 .map(|it| SmolStr::from(it.text()));
104 let type_params = 104 let type_params =
105 type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone()); 105 type_params.type_params().filter_map(|it| it.name()).map(|it| SmolStr::from(it.text()));
106 format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", ")) 106 format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
107 } 107 }
108 108
diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs
index be963f162..d95267607 100644
--- a/crates/assists/src/handlers/raw_string.rs
+++ b/crates/assists/src/handlers/raw_string.rs
@@ -138,7 +138,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
138 return None; 138 return None;
139 } 139 }
140 140
141 let text = token.text().as_str(); 141 let text = token.text();
142 if !text.starts_with("r#") && text.ends_with('#') { 142 if !text.starts_with("r#") && text.ends_with('#') {
143 return None; 143 return None;
144 } 144 }
diff --git a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
index bd4c1c806..6aa9d2f2c 100644
--- a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -3,7 +3,7 @@ use ide_db::imports_locator;
3use itertools::Itertools; 3use itertools::Itertools;
4use syntax::{ 4use syntax::{
5 ast::{self, make, AstNode}, 5 ast::{self, make, AstNode},
6 Direction, SmolStr, 6 Direction,
7 SyntaxKind::{IDENT, WHITESPACE}, 7 SyntaxKind::{IDENT, WHITESPACE},
8 TextSize, 8 TextSize,
9}; 9};
@@ -43,17 +43,18 @@ pub(crate) fn replace_derive_with_manual_impl(
43) -> Option<()> { 43) -> Option<()> {
44 let attr = ctx.find_node_at_offset::<ast::Attr>()?; 44 let attr = ctx.find_node_at_offset::<ast::Attr>()?;
45 45
46 let attr_name = attr 46 let has_derive = attr
47 .syntax() 47 .syntax()
48 .descendants_with_tokens() 48 .descendants_with_tokens()
49 .filter(|t| t.kind() == IDENT) 49 .filter(|t| t.kind() == IDENT)
50 .find_map(syntax::NodeOrToken::into_token) 50 .find_map(syntax::NodeOrToken::into_token)
51 .filter(|t| t.text() == "derive")? 51 .filter(|t| t.text() == "derive")
52 .text() 52 .is_some();
53 .clone(); 53 if !has_derive {
54 return None;
55 }
54 56
55 let trait_token = 57 let trait_token = ctx.token_at_offset().find(|t| t.kind() == IDENT && t.text() != "derive")?;
56 ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;
57 let trait_path = make::path_unqualified(make::path_segment(make::name_ref(trait_token.text()))); 58 let trait_path = make::path_unqualified(make::path_segment(make::name_ref(trait_token.text())));
58 59
59 let annotated_name = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?; 60 let annotated_name = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
@@ -176,9 +177,9 @@ fn update_attribute(
176 .syntax() 177 .syntax()
177 .descendants_with_tokens() 178 .descendants_with_tokens()
178 .filter(|t| t.kind() == IDENT) 179 .filter(|t| t.kind() == IDENT)
179 .filter_map(|t| t.into_token().map(|t| t.text().clone())) 180 .filter_map(|t| t.into_token().map(|t| t.text().to_string()))
180 .filter(|t| t != trait_name.text()) 181 .filter(|t| t != trait_name.text())
181 .collect::<Vec<SmolStr>>(); 182 .collect::<Vec<_>>();
182 let has_more_derives = !new_attr_input.is_empty(); 183 let has_more_derives = !new_attr_input.is_empty();
183 184
184 if has_more_derives { 185 if has_more_derives {
diff --git a/crates/assists/src/utils.rs b/crates/assists/src/utils.rs
index fc9f83bab..44c35bafa 100644
--- a/crates/assists/src/utils.rs
+++ b/crates/assists/src/utils.rs
@@ -223,7 +223,7 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
223 let method = mce.name_ref()?; 223 let method = mce.name_ref()?;
224 let arg_list = mce.arg_list()?; 224 let arg_list = mce.arg_list()?;
225 225
226 let method = match method.text().as_str() { 226 let method = match method.text() {
227 "is_some" => "is_none", 227 "is_some" => "is_none",
228 "is_none" => "is_some", 228 "is_none" => "is_some",
229 "is_ok" => "is_err", 229 "is_ok" => "is_err",
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index ac5596ca4..809e1645a 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -29,6 +29,10 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
29 } 29 }
30 30
31 ctx.scope.process_all_names(&mut |name, res| { 31 ctx.scope.process_all_names(&mut |name, res| {
32 if let ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) = res {
33 mark::hit!(skip_lifetime_completion);
34 return;
35 }
32 if ctx.use_item_syntax.is_some() { 36 if ctx.use_item_syntax.is_some() {
33 if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) { 37 if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) {
34 if name_ref.syntax().text() == name.to_string().as_str() { 38 if name_ref.syntax().text() == name.to_string().as_str() {
@@ -37,7 +41,7 @@ pub(crate) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
37 } 41 }
38 } 42 }
39 } 43 }
40 acc.add_resolution(ctx, name.to_string(), &res) 44 acc.add_resolution(ctx, name.to_string(), &res);
41 }); 45 });
42} 46}
43 47
@@ -234,6 +238,24 @@ fn main() {
234 fn quux() fn quux<T>() 238 fn quux() fn quux<T>()
235 "#]], 239 "#]],
236 ); 240 );
241 check(
242 r#"fn quux<const C: usize>() { $0 }"#,
243 expect![[r#"
244 tp C
245 fn quux() fn quux<const C: usize>()
246 "#]],
247 );
248 }
249
250 #[test]
251 fn does_not_complete_lifetimes() {
252 mark::check!(skip_lifetime_completion);
253 check(
254 r#"fn quux<'a>() { $0 }"#,
255 expect![[r#"
256 fn quux() fn quux<'a>()
257 "#]],
258 );
237 } 259 }
238 260
239 #[test] 261 #[test]
diff --git a/crates/hir/src/code_model.rs b/crates/hir/src/code_model.rs
index 5a4c27906..a4141e111 100644
--- a/crates/hir/src/code_model.rs
+++ b/crates/hir/src/code_model.rs
@@ -2046,7 +2046,7 @@ impl Callable {
2046pub enum ScopeDef { 2046pub enum ScopeDef {
2047 ModuleDef(ModuleDef), 2047 ModuleDef(ModuleDef),
2048 MacroDef(MacroDef), 2048 MacroDef(MacroDef),
2049 GenericParam(TypeParam), 2049 GenericParam(GenericParam),
2050 ImplSelfType(Impl), 2050 ImplSelfType(Impl),
2051 AdtSelfType(Adt), 2051 AdtSelfType(Adt),
2052 Local(Local), 2052 Local(Local),
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index ab213e04c..0a30b4f5b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -814,7 +814,7 @@ impl<'a> SemanticsScope<'a> {
814 } 814 }
815 resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), 815 resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
816 resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), 816 resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
817 resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), 817 resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
818 resolver::ScopeDef::Local(pat_id) => { 818 resolver::ScopeDef::Local(pat_id) => {
819 let parent = resolver.body_owner().unwrap().into(); 819 let parent = resolver.body_owner().unwrap().into();
820 ScopeDef::Local(Local { parent, pat_id }) 820 ScopeDef::Local(Local { parent, pat_id })
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs
index e7e92c72d..a505bf2be 100644
--- a/crates/hir_def/src/resolver.rs
+++ b/crates/hir_def/src/resolver.rs
@@ -21,8 +21,9 @@ use crate::{
21 per_ns::PerNs, 21 per_ns::PerNs,
22 visibility::{RawVisibility, Visibility}, 22 visibility::{RawVisibility, Visibility},
23 AdtId, AssocContainerId, ConstId, ConstParamId, ContainerId, DefWithBodyId, EnumId, 23 AdtId, AssocContainerId, ConstId, ConstParamId, ContainerId, DefWithBodyId, EnumId,
24 EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, LocalModuleId, Lookup, ModuleDefId, 24 EnumVariantId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, LifetimeParamId,
25 ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, VariantId, 25 LocalModuleId, Lookup, ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
26 TypeParamId, VariantId,
26}; 27};
27 28
28#[derive(Debug, Clone, Default)] 29#[derive(Debug, Clone, Default)]
@@ -484,7 +485,7 @@ pub enum ScopeDef {
484 PerNs(PerNs), 485 PerNs(PerNs),
485 ImplSelfType(ImplId), 486 ImplSelfType(ImplId),
486 AdtSelfType(AdtId), 487 AdtSelfType(AdtId),
487 GenericParam(TypeParamId), 488 GenericParam(GenericParamId),
488 Local(PatId), 489 Local(PatId),
489} 490}
490 491
@@ -527,15 +528,21 @@ impl Scope {
527 Scope::LocalItemsScope(body) => body.item_scope.entries().for_each(|(name, def)| { 528 Scope::LocalItemsScope(body) => body.item_scope.entries().for_each(|(name, def)| {
528 f(name.clone(), ScopeDef::PerNs(def)); 529 f(name.clone(), ScopeDef::PerNs(def));
529 }), 530 }),
530 Scope::GenericParams { params, def } => { 531 &Scope::GenericParams { ref params, def: parent } => {
531 for (local_id, param) in params.types.iter() { 532 for (local_id, param) in params.types.iter() {
532 if let Some(name) = &param.name { 533 if let Some(ref name) = param.name {
533 f( 534 let id = TypeParamId { local_id, parent };
534 name.clone(), 535 f(name.clone(), ScopeDef::GenericParam(id.into()))
535 ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }),
536 )
537 } 536 }
538 } 537 }
538 for (local_id, param) in params.consts.iter() {
539 let id = ConstParamId { local_id, parent };
540 f(param.name.clone(), ScopeDef::GenericParam(id.into()))
541 }
542 for (local_id, param) in params.lifetimes.iter() {
543 let id = LifetimeParamId { local_id, parent };
544 f(param.name.clone(), ScopeDef::GenericParam(id.into()))
545 }
539 } 546 }
540 Scope::ImplDefScope(i) => { 547 Scope::ImplDefScope(i) => {
541 f(name![Self], ScopeDef::ImplSelfType(*i)); 548 f(name![Self], ScopeDef::ImplSelfType(*i));
diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs
index eb257579f..b7f1aae8f 100644
--- a/crates/hir_expand/src/builtin_derive.rs
+++ b/crates/hir_expand/src/builtin_derive.rs
@@ -102,7 +102,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
102 debug!("name token not found"); 102 debug!("name token not found");
103 mbe::ExpandError::ConversionError 103 mbe::ExpandError::ConversionError
104 })?; 104 })?;
105 let name_token = tt::Ident { id: name_token_id, text: name.text().clone() }; 105 let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
106 let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count()); 106 let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
107 Ok(BasicAdtInfo { name: name_token, type_params }) 107 Ok(BasicAdtInfo { name: name_token, type_params })
108} 108}
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 467516eb7..cb6e23320 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -173,7 +173,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
173 }; 173 };
174 let loc = db.lookup_intern_macro(id); 174 let loc = db.lookup_intern_macro(id);
175 let arg = loc.kind.arg(db)?; 175 let arg = loc.kind.arg(db)?;
176 Some(arg.green().clone()) 176 Some(arg.green().to_owned())
177} 177}
178 178
179fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { 179fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index 95d853b6d..d692cec14 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -38,7 +38,7 @@ impl Name {
38 } 38 }
39 39
40 pub fn new_lifetime(lt: &ast::Lifetime) -> Name { 40 pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
41 Self::new_text(lt.text().clone()) 41 Self::new_text(lt.text().into())
42 } 42 }
43 43
44 /// Shortcut to create inline plain text name 44 /// Shortcut to create inline plain text name
@@ -47,12 +47,12 @@ impl Name {
47 } 47 }
48 48
49 /// Resolve a name from the text of token. 49 /// Resolve a name from the text of token.
50 fn resolve(raw_text: &SmolStr) -> Name { 50 fn resolve(raw_text: &str) -> Name {
51 let raw_start = "r#"; 51 let raw_start = "r#";
52 if raw_text.as_str().starts_with(raw_start) { 52 if raw_text.starts_with(raw_start) {
53 Name::new_text(SmolStr::new(&raw_text[raw_start.len()..])) 53 Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
54 } else { 54 } else {
55 Name::new_text(raw_text.clone()) 55 Name::new_text(raw_text.into())
56 } 56 }
57 } 57 }
58 58
diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs
index 00e601244..671aa1373 100644
--- a/crates/ide/src/display/navigation_target.rs
+++ b/crates/ide/src/display/navigation_target.rs
@@ -153,8 +153,7 @@ impl NavigationTarget {
153 node: InFile<&dyn ast::NameOwner>, 153 node: InFile<&dyn ast::NameOwner>,
154 kind: SymbolKind, 154 kind: SymbolKind,
155 ) -> NavigationTarget { 155 ) -> NavigationTarget {
156 let name = 156 let name = node.value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
157 node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
158 let focus_range = 157 let focus_range =
159 node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range); 158 node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
160 let frange = node.map(|it| it.syntax()).original_file_range(db); 159 let frange = node.map(|it| it.syntax()).original_file_range(db);
diff --git a/crates/ide/src/display/short_label.rs b/crates/ide/src/display/short_label.rs
index 990f740b8..b8e4cc181 100644
--- a/crates/ide/src/display/short_label.rs
+++ b/crates/ide/src/display/short_label.rs
@@ -90,7 +90,7 @@ impl ShortLabel for ast::Variant {
90impl ShortLabel for ast::ConstParam { 90impl ShortLabel for ast::ConstParam {
91 fn short_label(&self) -> Option<String> { 91 fn short_label(&self) -> Option<String> {
92 let mut buf = "const ".to_owned(); 92 let mut buf = "const ".to_owned();
93 buf.push_str(self.name()?.text().as_str()); 93 buf.push_str(self.name()?.text());
94 if let Some(type_ref) = self.ty() { 94 if let Some(type_ref) = self.ty() {
95 format_to!(buf, ": {}", type_ref.syntax()); 95 format_to!(buf, ": {}", type_ref.syntax());
96 } 96 }
@@ -117,6 +117,6 @@ where
117{ 117{
118 let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default(); 118 let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default();
119 buf.push_str(label); 119 buf.push_str(label);
120 buf.push_str(node.name()?.text().as_str()); 120 buf.push_str(node.name()?.text());
121 Some(buf) 121 Some(buf)
122} 122}
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 17a540972..2d722dee0 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -213,8 +213,8 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange
213 let ws_text = ws.text(); 213 let ws_text = ws.text();
214 let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start(); 214 let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
215 let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start(); 215 let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
216 let ws_suffix = &ws_text.as_str()[suffix]; 216 let ws_suffix = &ws_text[suffix];
217 let ws_prefix = &ws_text.as_str()[prefix]; 217 let ws_prefix = &ws_text[prefix];
218 if ws_text.contains('\n') && !ws_suffix.contains('\n') { 218 if ws_text.contains('\n') && !ws_suffix.contains('\n') {
219 if let Some(node) = ws.next_sibling_or_token() { 219 if let Some(node) = ws.next_sibling_or_token() {
220 let start = match ws_prefix.rfind('\n') { 220 let start = match ws_prefix.rfind('\n') {
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index a2039fcc7..54485fd30 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -411,7 +411,7 @@ fn get_string_representation(expr: &ast::Expr) -> Option<String> {
411 match expr { 411 match expr {
412 ast::Expr::MethodCallExpr(method_call_expr) => { 412 ast::Expr::MethodCallExpr(method_call_expr) => {
413 let name_ref = method_call_expr.name_ref()?; 413 let name_ref = method_call_expr.name_ref()?;
414 match name_ref.text().as_str() { 414 match name_ref.text() {
415 "clone" => method_call_expr.receiver().map(|rec| rec.to_string()), 415 "clone" => method_call_expr.receiver().map(|rec| rec.to_string()),
416 name_ref => Some(name_ref.to_owned()), 416 name_ref => Some(name_ref.to_owned()),
417 } 417 }
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 981467c8d..631bde0f1 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -59,7 +59,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextS
59 // The node is either the first or the last in the file 59 // The node is either the first or the last in the file
60 let suff = &token.text()[TextRange::new( 60 let suff = &token.text()[TextRange::new(
61 offset - token.text_range().start() + TextSize::of('\n'), 61 offset - token.text_range().start() + TextSize::of('\n'),
62 TextSize::of(token.text().as_str()), 62 TextSize::of(token.text()),
63 )]; 63 )];
64 let spaces = suff.bytes().take_while(|&b| b == b' ').count(); 64 let spaces = suff.bytes().take_while(|&b| b == b' ').count();
65 65
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 8976f1080..47a85dc45 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -9,6 +9,7 @@ use syntax::{
9 ast::{self, AstNode, AttrsOwner}, 9 ast::{self, AstNode, AttrsOwner},
10 match_ast, SyntaxNode, 10 match_ast, SyntaxNode,
11}; 11};
12use test_utils::mark;
12 13
13use crate::{ 14use crate::{
14 display::{ToNav, TryToNav}, 15 display::{ToNav, TryToNav},
@@ -96,28 +97,26 @@ impl Runnable {
96pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { 97pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
97 let sema = Semantics::new(db); 98 let sema = Semantics::new(db);
98 let module = match sema.to_module_def(file_id) { 99 let module = match sema.to_module_def(file_id) {
99 None => return vec![], 100 None => return Vec::new(),
100 Some(it) => it, 101 Some(it) => it,
101 }; 102 };
102 103
103 runnables_mod(&sema, module) 104 let mut res = Vec::new();
105 runnables_mod(&sema, &mut res, module);
106 res
104} 107}
105 108
106fn runnables_mod(sema: &Semantics<RootDatabase>, module: hir::Module) -> Vec<Runnable> { 109fn runnables_mod(sema: &Semantics<RootDatabase>, acc: &mut Vec<Runnable>, module: hir::Module) {
107 let mut res: Vec<Runnable> = module 110 acc.extend(module.declarations(sema.db).into_iter().filter_map(|def| {
108 .declarations(sema.db) 111 let runnable = match def {
109 .into_iter() 112 hir::ModuleDef::Module(it) => runnable_mod(&sema, it),
110 .filter_map(|def| { 113 hir::ModuleDef::Function(it) => runnable_fn(&sema, it),
111 let runnable = match def { 114 _ => None,
112 hir::ModuleDef::Module(it) => runnable_mod(&sema, it), 115 };
113 hir::ModuleDef::Function(it) => runnable_fn(&sema, it), 116 runnable.or_else(|| module_def_doctest(&sema, def))
114 _ => None, 117 }));
115 };
116 runnable.or_else(|| module_def_doctest(&sema, def))
117 })
118 .collect();
119 118
120 res.extend(module.impl_defs(sema.db).into_iter().flat_map(|it| it.items(sema.db)).filter_map( 119 acc.extend(module.impl_defs(sema.db).into_iter().flat_map(|it| it.items(sema.db)).filter_map(
121 |def| match def { 120 |def| match def {
122 hir::AssocItem::Function(it) => { 121 hir::AssocItem::Function(it) => {
123 runnable_fn(&sema, it).or_else(|| module_def_doctest(&sema, it.into())) 122 runnable_fn(&sema, it).or_else(|| module_def_doctest(&sema, it.into()))
@@ -127,12 +126,14 @@ fn runnables_mod(sema: &Semantics<RootDatabase>, module: hir::Module) -> Vec<Run
127 }, 126 },
128 )); 127 ));
129 128
130 res.extend(module.declarations(sema.db).into_iter().flat_map(|def| match def { 129 for def in module.declarations(sema.db) {
131 hir::ModuleDef::Module(it) => runnables_mod(sema, it), 130 if let hir::ModuleDef::Module(submodule) = def {
132 _ => vec![], 131 match submodule.definition_source(sema.db).value {
133 })); 132 hir::ModuleSource::Module(_) => runnables_mod(sema, acc, submodule),
134 133 hir::ModuleSource::SourceFile(_) => mark::hit!(dont_recurse_in_outline_submodules),
135 res 134 }
135 }
136 }
136} 137}
137 138
138pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> Option<Runnable> { 139pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> Option<Runnable> {
@@ -326,6 +327,7 @@ fn has_test_function_or_multiple_test_submodules(
326#[cfg(test)] 327#[cfg(test)]
327mod tests { 328mod tests {
328 use expect_test::{expect, Expect}; 329 use expect_test::{expect, Expect};
330 use test_utils::mark;
329 331
330 use crate::fixture; 332 use crate::fixture;
331 333
@@ -1050,4 +1052,25 @@ mod tests {
1050 "#]], 1052 "#]],
1051 ); 1053 );
1052 } 1054 }
1055
1056 #[test]
1057 fn dont_recurse_in_outline_submodules() {
1058 mark::check!(dont_recurse_in_outline_submodules);
1059 check(
1060 r#"
1061//- /lib.rs
1062$0
1063mod m;
1064//- /m.rs
1065mod tests {
1066 #[test]
1067 fn t() {}
1068}
1069"#,
1070 &[],
1071 expect![[r#"
1072 []
1073 "#]],
1074 );
1075 }
1053} 1076}
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index a74ca844b..8a9b5ca8c 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -30,7 +30,7 @@ fn is_format_string(string: &ast::String) -> Option<()> {
30 let parent = string.syntax().parent(); 30 let parent = string.syntax().parent();
31 31
32 let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?; 32 let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
33 if !matches!(name.text().as_str(), "format_args" | "format_args_nl") { 33 if !matches!(name.text(), "format_args" | "format_args_nl") {
34 return None; 34 return None;
35 } 35 }
36 36
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 281461493..8cdc3688f 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -116,7 +116,7 @@ pub(super) fn doc_comment(hl: &mut Highlights, node: &SyntaxNode) {
116 None => (), 116 None => (),
117 } 117 }
118 118
119 let line: &str = comment.text().as_str(); 119 let line: &str = comment.text();
120 let range = comment.syntax().text_range(); 120 let range = comment.syntax().text_range();
121 121
122 let mut pos = TextSize::of(comment.prefix()); 122 let mut pos = TextSize::of(comment.prefix());
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs
index d9875ffef..a8091dbee 100644
--- a/crates/ide_db/src/defs.rs
+++ b/crates/ide_db/src/defs.rs
@@ -343,7 +343,7 @@ impl NameRefClass {
343 hir::AssocItem::TypeAlias(it) => Some(*it), 343 hir::AssocItem::TypeAlias(it) => Some(*it),
344 _ => None, 344 _ => None,
345 }) 345 })
346 .find(|alias| alias.name(sema.db).to_string() == **name_ref.text()) 346 .find(|alias| &alias.name(sema.db).to_string() == name_ref.text())
347 { 347 {
348 return Some(NameRefClass::Definition(Definition::ModuleDef( 348 return Some(NameRefClass::Definition(Definition::ModuleDef(
349 ModuleDef::TypeAlias(ty), 349 ModuleDef::TypeAlias(ty),
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs
index 877d4f1c7..fd4035198 100644
--- a/crates/ide_db/src/helpers/insert_use.rs
+++ b/crates/ide_db/src/helpers/insert_use.rs
@@ -507,7 +507,7 @@ impl ImportGroup {
507 PathSegmentKind::SelfKw => ImportGroup::ThisModule, 507 PathSegmentKind::SelfKw => ImportGroup::ThisModule,
508 PathSegmentKind::SuperKw => ImportGroup::SuperModule, 508 PathSegmentKind::SuperKw => ImportGroup::SuperModule,
509 PathSegmentKind::CrateKw => ImportGroup::ThisCrate, 509 PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
510 PathSegmentKind::Name(name) => match name.text().as_str() { 510 PathSegmentKind::Name(name) => match name.text() {
511 "std" => ImportGroup::Std, 511 "std" => ImportGroup::Std,
512 "core" => ImportGroup::Std, 512 "core" => ImportGroup::Std,
513 _ => ImportGroup::ExternCrate, 513 _ => ImportGroup::ExternCrate,
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs
index 0aa6a0765..500bdfd6b 100644
--- a/crates/ide_db/src/symbol_index.rs
+++ b/crates/ide_db/src/symbol_index.rs
@@ -209,7 +209,7 @@ pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<Fil
209 query.search(&buf) 209 query.search(&buf)
210} 210}
211 211
212pub fn index_resolve(db: &RootDatabase, name: &SmolStr) -> Vec<FileSymbol> { 212pub fn index_resolve(db: &RootDatabase, name: &str) -> Vec<FileSymbol> {
213 let mut query = Query::new(name.to_string()); 213 let mut query = Query::new(name.to_string());
214 query.exact(); 214 query.exact();
215 query.limit(4); 215 query.limit(4);
@@ -409,7 +409,7 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
409 fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { 409 fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
410 let name = node.name()?; 410 let name = node.name()?;
411 let name_range = name.syntax().text_range(); 411 let name_range = name.syntax().text_range();
412 let name = name.text().clone(); 412 let name = name.text().into();
413 let ptr = SyntaxNodePtr::new(node.syntax()); 413 let ptr = SyntaxNodePtr::new(node.syntax());
414 414
415 Some((name, ptr, name_range)) 415 Some((name, ptr, name_range))
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 51002e7b8..0cdc175be 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -507,7 +507,7 @@ impl SrcToken for SynToken {
507 } 507 }
508 } 508 }
509 fn to_text(&self) -> SmolStr { 509 fn to_text(&self) -> SmolStr {
510 self.token().text().clone() 510 self.token().text().into()
511 } 511 }
512} 512}
513 513
@@ -682,10 +682,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
682 self.text_pos += TextSize::of(text); 682 self.text_pos += TextSize::of(text);
683 } 683 }
684 684
685 let text = SmolStr::new(self.buf.as_str()); 685 self.inner.token(kind, self.buf.as_str());
686 self.buf.clear(); 686 self.buf.clear();
687 self.inner.token(kind, text);
688
689 // Add whitespace between adjoint puncts 687 // Add whitespace between adjoint puncts
690 let next = last.bump(); 688 let next = last.bump();
691 if let ( 689 if let (
diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs
index 42d313f91..df013bae9 100644
--- a/crates/ssr/src/matching.rs
+++ b/crates/ssr/src/matching.rs
@@ -10,8 +10,11 @@ use hir::Semantics;
10use ide_db::base_db::FileRange; 10use ide_db::base_db::FileRange;
11use rustc_hash::FxHashMap; 11use rustc_hash::FxHashMap;
12use std::{cell::Cell, iter::Peekable}; 12use std::{cell::Cell, iter::Peekable};
13use syntax::ast::{AstNode, AstToken};
14use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken}; 13use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
14use syntax::{
15 ast::{AstNode, AstToken},
16 SmolStr,
17};
15use test_utils::mark; 18use test_utils::mark;
16 19
17// Creates a match error. If we're currently attempting to match some code that we thought we were 20// Creates a match error. If we're currently attempting to match some code that we thought we were
@@ -398,11 +401,11 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
398 code: &SyntaxNode, 401 code: &SyntaxNode,
399 ) -> Result<(), MatchFailed> { 402 ) -> Result<(), MatchFailed> {
400 // Build a map keyed by field name. 403 // Build a map keyed by field name.
401 let mut fields_by_name = FxHashMap::default(); 404 let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
402 for child in code.children() { 405 for child in code.children() {
403 if let Some(record) = ast::RecordExprField::cast(child.clone()) { 406 if let Some(record) = ast::RecordExprField::cast(child.clone()) {
404 if let Some(name) = record.field_name() { 407 if let Some(name) = record.field_name() {
405 fields_by_name.insert(name.text().clone(), child.clone()); 408 fields_by_name.insert(name.text().into(), child.clone());
406 } 409 }
407 } 410 }
408 } 411 }
@@ -473,9 +476,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
473 } 476 }
474 SyntaxElement::Node(n) => { 477 SyntaxElement::Node(n) => {
475 if let Some(first_token) = n.first_token() { 478 if let Some(first_token) = n.first_token() {
476 if Some(first_token.text().as_str()) 479 if Some(first_token.text()) == next_pattern_token.as_deref() {
477 == next_pattern_token.as_deref()
478 {
479 if let Some(SyntaxElement::Node(p)) = pattern.next() { 480 if let Some(SyntaxElement::Node(p)) = pattern.next() {
480 // We have a subtree that starts with the next token in our pattern. 481 // We have a subtree that starts with the next token in our pattern.
481 self.attempt_match_token_tree(phase, &p, &n)?; 482 self.attempt_match_token_tree(phase, &p, &n)?;
diff --git a/crates/ssr/src/replacing.rs b/crates/ssr/src/replacing.rs
index 7e7ce37bd..06a94a46c 100644
--- a/crates/ssr/src/replacing.rs
+++ b/crates/ssr/src/replacing.rs
@@ -173,7 +173,7 @@ impl ReplacementRenderer<'_> {
173 ); 173 );
174 } 174 }
175 } else { 175 } else {
176 self.out.push_str(token.text().as_str()); 176 self.out.push_str(token.text());
177 } 177 }
178 } 178 }
179 179
diff --git a/crates/ssr/src/resolving.rs b/crates/ssr/src/resolving.rs
index f5ceb5729..14e5a3b69 100644
--- a/crates/ssr/src/resolving.rs
+++ b/crates/ssr/src/resolving.rs
@@ -228,7 +228,7 @@ impl<'db> ResolutionScope<'db> {
228 None, 228 None,
229 |_ty, assoc_item| { 229 |_ty, assoc_item| {
230 let item_name = assoc_item.name(self.scope.db)?; 230 let item_name = assoc_item.name(self.scope.db)?;
231 if item_name.to_string().as_str() == name.text().as_str() { 231 if item_name.to_string().as_str() == name.text() {
232 Some(hir::PathResolution::AssocItem(assoc_item)) 232 Some(hir::PathResolution::AssocItem(assoc_item))
233 } else { 233 } else {
234 None 234 None
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 52394b337..55b437a3a 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,15 +12,12 @@ doctest = false
12 12
13[dependencies] 13[dependencies]
14itertools = "0.10.0" 14itertools = "0.10.0"
15rowan = "0.10.3" 15rowan = "0.12"
16rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" } 16rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
17rustc-hash = "1.1.0" 17rustc-hash = "1.1.0"
18arrayvec = "0.5.1" 18arrayvec = "0.5.1"
19once_cell = "1.3.1" 19once_cell = "1.3.1"
20indexmap = "1.4.0" 20indexmap = "1.4.0"
21# This crate transitively depends on `smol_str` via `rowan`.
22# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
23# to reduce number of compilations
24smol_str = { version = "0.1.15", features = ["serde"] } 21smol_str = { version = "0.1.15", features = ["serde"] }
25serde = { version = "1.0.106", features = ["derive"] } 22serde = { version = "1.0.106", features = ["derive"] }
26 23
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 827ae78f9..2ff92f9f6 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -4,6 +4,7 @@ use std::{
4 fmt, 4 fmt,
5 hash::BuildHasherDefault, 5 hash::BuildHasherDefault,
6 ops::{self, RangeInclusive}, 6 ops::{self, RangeInclusive},
7 ptr,
7}; 8};
8 9
9use indexmap::IndexMap; 10use indexmap::IndexMap;
@@ -171,7 +172,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
171 && lhs.text_range().len() == rhs.text_range().len() 172 && lhs.text_range().len() == rhs.text_range().len()
172 && match (&lhs, &rhs) { 173 && match (&lhs, &rhs) {
173 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { 174 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
174 lhs.green() == rhs.green() || lhs.text() == rhs.text() 175 ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
175 } 176 }
176 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), 177 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
177 _ => false, 178 _ => false,
@@ -566,7 +567,7 @@ impl<'a> SyntaxRewriter<'a> {
566 567
567fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { 568fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
568 match element { 569 match element {
569 NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()), 570 NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
570 NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), 571 NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
571 } 572 }
572} 573}
@@ -624,7 +625,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
624 625
625fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { 626fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
626 match element { 627 match element {
627 NodeOrToken::Node(it) => it.green().clone().into(), 628 NodeOrToken::Node(it) => it.green().to_owned().into(),
628 NodeOrToken::Token(it) => it.green().clone().into(), 629 NodeOrToken::Token(it) => it.green().clone().into(),
629 } 630 }
630} 631}
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
12 12
13use crate::{ 13use crate::{
14 syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken}, 14 syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
15 SmolStr, SyntaxKind, 15 SyntaxKind,
16}; 16};
17 17
18pub use self::{ 18pub use self::{
@@ -54,7 +54,7 @@ pub trait AstToken {
54 54
55 fn syntax(&self) -> &SyntaxToken; 55 fn syntax(&self) -> &SyntaxToken;
56 56
57 fn text(&self) -> &SmolStr { 57 fn text(&self) -> &str {
58 self.syntax().text() 58 self.syntax().text()
59 } 59 }
60} 60}
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 9ffc3ae11..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -478,7 +478,7 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
478} 478}
479 479
480fn unroot(n: SyntaxNode) -> SyntaxNode { 480fn unroot(n: SyntaxNode) -> SyntaxNode {
481 SyntaxNode::new_root(n.green().clone()) 481 SyntaxNode::new_root(n.green().to_owned())
482} 482}
483 483
484pub mod tokens { 484pub mod tokens {
@@ -495,7 +495,7 @@ pub mod tokens {
495 .syntax() 495 .syntax()
496 .descendants_with_tokens() 496 .descendants_with_tokens()
497 .filter_map(|it| it.into_token()) 497 .filter_map(|it| it.into_token())
498 .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ") 498 .find(|it| it.kind() == WHITESPACE && it.text() == " ")
499 .unwrap() 499 .unwrap()
500 } 500 }
501 501
@@ -523,7 +523,7 @@ pub mod tokens {
523 .syntax() 523 .syntax()
524 .descendants_with_tokens() 524 .descendants_with_tokens()
525 .filter_map(|it| it.into_token()) 525 .filter_map(|it| it.into_token())
526 .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n") 526 .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
527 .unwrap() 527 .unwrap()
528 } 528 }
529 529
@@ -533,7 +533,7 @@ pub mod tokens {
533 .syntax() 533 .syntax()
534 .descendants_with_tokens() 534 .descendants_with_tokens()
535 .filter_map(|it| it.into_token()) 535 .filter_map(|it| it.into_token())
536 .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n") 536 .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
537 .unwrap() 537 .unwrap()
538 } 538 }
539 539
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
13}; 13};
14 14
15impl ast::Lifetime { 15impl ast::Lifetime {
16 pub fn text(&self) -> &SmolStr { 16 pub fn text(&self) -> &str {
17 text_of_first_token(self.syntax()) 17 text_of_first_token(self.syntax())
18 } 18 }
19} 19}
20 20
21impl ast::Name { 21impl ast::Name {
22 pub fn text(&self) -> &SmolStr { 22 pub fn text(&self) -> &str {
23 text_of_first_token(self.syntax()) 23 text_of_first_token(self.syntax())
24 } 24 }
25} 25}
26 26
27impl ast::NameRef { 27impl ast::NameRef {
28 pub fn text(&self) -> &SmolStr { 28 pub fn text(&self) -> &str {
29 text_of_first_token(self.syntax()) 29 text_of_first_token(self.syntax())
30 } 30 }
31 31
@@ -34,7 +34,7 @@ impl ast::NameRef {
34 } 34 }
35} 35}
36 36
37fn text_of_first_token(node: &SyntaxNode) -> &SmolStr { 37fn text_of_first_token(node: &SyntaxNode) -> &str {
38 node.green().children().next().and_then(|it| it.into_token()).unwrap().text() 38 node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
39} 39}
40 40
@@ -121,7 +121,7 @@ impl ast::Attr {
121 pub fn simple_name(&self) -> Option<SmolStr> { 121 pub fn simple_name(&self) -> Option<SmolStr> {
122 let path = self.path()?; 122 let path = self.path()?;
123 match (path.segment(), path.qualifier()) { 123 match (path.segment(), path.qualifier()) {
124 (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()), 124 (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
125 _ => None, 125 _ => None,
126 } 126 }
127 } 127 }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
41 match kind { 41 match kind {
42 CommentKind { shape, doc: Some(_) } => { 42 CommentKind { shape, doc: Some(_) } => {
43 let prefix = kind.prefix(); 43 let prefix = kind.prefix();
44 let text = &self.text().as_str()[prefix.len()..]; 44 let text = &self.text()[prefix.len()..];
45 let ws = text.chars().next().filter(|c| c.is_whitespace()); 45 let ws = text.chars().next().filter(|c| c.is_whitespace());
46 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]); 46 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
47 match shape { 47 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
156 156
157 pub fn value(&self) -> Option<Cow<'_, str>> { 157 pub fn value(&self) -> Option<Cow<'_, str>> {
158 if self.is_raw() { 158 if self.is_raw() {
159 let text = self.text().as_str(); 159 let text = self.text();
160 let text = 160 let text =
161 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 161 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
162 return Some(Cow::Borrowed(text)); 162 return Some(Cow::Borrowed(text));
163 } 163 }
164 164
165 let text = self.text().as_str(); 165 let text = self.text();
166 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 166 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
167 167
168 let mut buf = String::new(); 168 let mut buf = String::new();
@@ -190,7 +190,7 @@ impl ast::String {
190 } 190 }
191 191
192 pub fn quote_offsets(&self) -> Option<QuoteOffsets> { 192 pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
193 let text = self.text().as_str(); 193 let text = self.text();
194 let offsets = QuoteOffsets::new(text)?; 194 let offsets = QuoteOffsets::new(text)?;
195 let o = self.syntax().text_range().start(); 195 let o = self.syntax().text_range().start();
196 let offsets = QuoteOffsets { 196 let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
560 fn char_ranges( 560 fn char_ranges(
561 &self, 561 &self,
562 ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> { 562 ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
563 let text = self.text().as_str(); 563 let text = self.text();
564 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; 564 let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
565 let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start(); 565 let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
566 566
@@ -590,7 +590,7 @@ impl ast::IntNumber {
590 pub fn value(&self) -> Option<u128> { 590 pub fn value(&self) -> Option<u128> {
591 let token = self.syntax(); 591 let token = self.syntax();
592 592
593 let mut text = token.text().as_str(); 593 let mut text = token.text();
594 if let Some(suffix) = self.suffix() { 594 if let Some(suffix) = self.suffix() {
595 text = &text[..text.len() - suffix.len()] 595 text = &text[..text.len() - suffix.len()]
596 } 596 }
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index ea7482bb1..11294c5b2 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -56,9 +56,9 @@ pub use crate::{
56}; 56};
57pub use parser::{SyntaxKind, T}; 57pub use parser::{SyntaxKind, T};
58pub use rowan::{ 58pub use rowan::{
59 Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, 59 Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
60 WalkEvent,
61}; 60};
61pub use smol_str::SmolStr;
62 62
63/// `Parse` is the result of the parsing: a syntax tree and a collection of 63/// `Parse` is the result of the parsing: a syntax tree and a collection of
64/// errors. 64/// errors.
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 76f01084c..3d637bf91 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -73,8 +73,7 @@ fn reparse_token<'node>(
73 new_text.pop(); 73 new_text.pop();
74 } 74 }
75 75
76 let new_token = 76 let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
77 GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
78 Some(( 77 Some((
79 prev_token.replace_with(new_token), 78 prev_token.replace_with(new_token),
80 new_err.into_iter().collect(), 79 new_err.into_iter().collect(),
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index ce27c3dd9..d5ddc076f 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -8,7 +8,7 @@ use crate::{
8 ast, 8 ast,
9 parsing::Token, 9 parsing::Token,
10 syntax_node::GreenNode, 10 syntax_node::GreenNode,
11 SmolStr, SyntaxError, 11 SyntaxError,
12 SyntaxKind::{self, *}, 12 SyntaxKind::{self, *},
13 SyntaxTreeBuilder, TextRange, TextSize, 13 SyntaxTreeBuilder, TextRange, TextSize,
14}; 14};
@@ -135,7 +135,7 @@ impl<'a> TextTreeSink<'a> {
135 135
136 fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) { 136 fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
137 let range = TextRange::at(self.text_pos, len); 137 let range = TextRange::at(self.text_pos, len);
138 let text: SmolStr = self.text[range].into(); 138 let text = &self.text[range];
139 self.text_pos += len; 139 self.text_pos += len;
140 self.token_pos += n_tokens; 140 self.token_pos += n_tokens;
141 self.inner.token(kind, text); 141 self.inner.token(kind, text);
diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs
index cc30138fa..8f643b228 100644
--- a/crates/syntax/src/syntax_node.rs
+++ b/crates/syntax/src/syntax_node.rs
@@ -8,7 +8,7 @@
8 8
9use rowan::{GreenNodeBuilder, Language}; 9use rowan::{GreenNodeBuilder, Language};
10 10
11use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize}; 11use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
12 12
13pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken}; 13pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
14 14
@@ -53,7 +53,7 @@ impl SyntaxTreeBuilder {
53 Parse::new(green, errors) 53 Parse::new(green, errors)
54 } 54 }
55 55
56 pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { 56 pub fn token(&mut self, kind: SyntaxKind, text: &str) {
57 let kind = RustLanguage::kind_to_raw(kind); 57 let kind = RustLanguage::kind_to_raw(kind);
58 self.inner.token(kind, text) 58 self.inner.token(kind, text)
59 } 59 }
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
116 } 116 }
117 117
118 let token = literal.token(); 118 let token = literal.token();
119 let text = token.text().as_str(); 119 let text = token.text();
120 120
121 // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205) 121 // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
122 let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| { 122 let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
diff --git a/docs/dev/style.md b/docs/dev/style.md
index 21330948b..389649398 100644
--- a/docs/dev/style.md
+++ b/docs/dev/style.md
@@ -280,6 +280,9 @@ Prefer `Default` even it has to be implemented manually.
280 280
281**Rationale:** less typing in the common case, uniformity. 281**Rationale:** less typing in the common case, uniformity.
282 282
283Use `Vec::new` rather than `vec![]`. **Rationale:** uniformity, strength
284reduction.
285
283## Functions Over Objects 286## Functions Over Objects
284 287
285Avoid creating "doer" objects. 288Avoid creating "doer" objects.
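A minimal sketch of the `Vec::new` guideline above, in the style guide's own GOOD/BAD convention (the helper function and its contents are invented for illustration):

```rust
// GOOD: call the type's constructor directly.
fn collect_errors() -> Vec<String> {
    let mut errors: Vec<String> = Vec::new();
    errors.push("unexpected token".to_string());
    errors
}

// BAD: an empty `vec![]` is a macro invocation where a plain constructor call suffices.
fn collect_errors_bad() -> Vec<String> {
    let mut errors = vec![];
    errors.push("unexpected token".to_string());
    errors
}
```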
@@ -418,12 +421,44 @@ fn frobnicate(s: &str) {
418**Rationale:** reveals the costs. 421**Rationale:** reveals the costs.
419It is also more efficient when the caller already owns the allocation. 422It is also more efficient when the caller already owns the allocation.
420 423
421## Collection types 424## Collection Types
422 425
423Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`. 426Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`.
424 427
425**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount. 428**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount.
426 429
430## Avoid Intermediate Collections
431
432When writing a recursive function to compute a set of things, use an accumulator parameter instead of returning a fresh collection.
433The accumulator goes first in the list of arguments.
434
435```rust
436// GOOD
437pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
438 let mut res = FxHashSet::default();
439 go(&mut res, node);
440 res
441}
442fn go(acc: &mut FxHashSet<Node>, node: Node) {
443 acc.insert(node);
444 for n in node.neighbors() {
445 go(acc, n);
446 }
447}
448
449// BAD
450pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
451 let mut res = FxHashSet::default();
452 res.insert(node);
453 for n in node.neighbors() {
454 res.extend(reachable_nodes(n));
455 }
456 res
457}
458```
459
460**Rationale:** re-use allocations, accumulator style is more concise for complex cases.
461
427# Style 462# Style
428 463
429## Order of Imports 464## Order of Imports