28 files changed, 69 insertions, 67 deletions
diff --git a/Cargo.lock b/Cargo.lock
index c9e0d63a6..130722dec 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1376,8 +1376,6 @@ checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
 [[package]]
 name = "rowan"
 version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3ae0ae5091cf38acfb834dbb6adcd45bb0d6b6a72ca5798e134195d2fa33574"
 dependencies = [
  "hashbrown",
  "memoffset",
diff --git a/crates/assists/src/handlers/generate_impl.rs b/crates/assists/src/handlers/generate_impl.rs
index 9af45192b..827477272 100644
--- a/crates/assists/src/handlers/generate_impl.rs
+++ b/crates/assists/src/handlers/generate_impl.rs
@@ -1,6 +1,9 @@
 use itertools::Itertools;
 use stdx::format_to;
-use syntax::ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner};
+use syntax::{
+    ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner},
+    SmolStr,
+};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -49,16 +52,16 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()
                 format_to!(buf, "{}", type_params.syntax());
             }
             buf.push_str(" ");
-            buf.push_str(name.text().as_str());
+            buf.push_str(name.text());
             if let Some(type_params) = type_params {
                 let lifetime_params = type_params
                     .lifetime_params()
                     .filter_map(|it| it.lifetime())
-                    .map(|it| it.text().clone());
+                    .map(|it| SmolStr::from(it.text()));
                 let type_params = type_params
                     .type_params()
                     .filter_map(|it| it.name())
-                    .map(|it| it.text().clone());
+                    .map(|it| SmolStr::from(it.text()));
 
                 let generic_params = lifetime_params.chain(type_params).format(", ");
                 format_to!(buf, "<{}>", generic_params)
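The pattern in this hunk repeats throughout the change: `text()` now hands out a `&str` borrowed from the token instead of a `&SmolStr`, so call sites that only read the text drop their `.as_str()`, while call sites that store the text convert explicitly with `SmolStr::from` / `.into()`. A minimal sketch of the two cases, using a stand-in `Token` type (not the real rowan API) and assuming the `smol_str` crate as a dependency:

    use smol_str::SmolStr;

    // Stand-in for a syntax token; like the new API, `text()` returns `&str`.
    struct Token {
        text: String,
    }

    impl Token {
        fn text(&self) -> &str {
            &self.text
        }
    }

    fn main() {
        let token = Token { text: "Foo".to_string() };

        // Borrow-only uses need no conversion at all (previously `.as_str()`):
        assert!(token.text().starts_with('F'));

        // Storing the text requires an explicit owned copy (previously a
        // `.clone()` of the SmolStr), as in the hunks above:
        let owned: SmolStr = SmolStr::from(token.text());
        let names: Vec<SmolStr> = vec![owned];
        assert_eq!(names[0].as_str(), "Foo");
    }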
diff --git a/crates/assists/src/handlers/generate_new.rs b/crates/assists/src/handlers/generate_new.rs
index 5c52b2bc8..b7390855a 100644
--- a/crates/assists/src/handlers/generate_new.rs
+++ b/crates/assists/src/handlers/generate_new.rs
@@ -3,7 +3,7 @@ use itertools::Itertools;
 use stdx::format_to;
 use syntax::{
     ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
-    T,
+    SmolStr, T,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -95,14 +95,14 @@ fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
         format_to!(buf, "{}", type_params.syntax());
     }
     buf.push_str(" ");
-    buf.push_str(strukt.name().unwrap().text().as_str());
+    buf.push_str(strukt.name().unwrap().text());
    if let Some(type_params) = type_params {
        let lifetime_params = type_params
            .lifetime_params()
            .filter_map(|it| it.lifetime())
-            .map(|it| it.text().clone());
+            .map(|it| SmolStr::from(it.text()));
        let type_params =
-            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
+            type_params.type_params().filter_map(|it| it.name()).map(|it| SmolStr::from(it.text()));
        format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
    }
 
diff --git a/crates/assists/src/handlers/raw_string.rs b/crates/assists/src/handlers/raw_string.rs
index be963f162..d95267607 100644
--- a/crates/assists/src/handlers/raw_string.rs
+++ b/crates/assists/src/handlers/raw_string.rs
@@ -138,7 +138,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
         return None;
     }
 
-    let text = token.text().as_str();
+    let text = token.text();
     if !text.starts_with("r#") && text.ends_with('#') {
         return None;
     }
diff --git a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
index bd4c1c806..6aa9d2f2c 100644
--- a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -3,7 +3,7 @@ use ide_db::imports_locator;
 use itertools::Itertools;
 use syntax::{
     ast::{self, make, AstNode},
-    Direction, SmolStr,
+    Direction,
     SyntaxKind::{IDENT, WHITESPACE},
     TextSize,
 };
@@ -43,17 +43,18 @@ pub(crate) fn replace_derive_with_manual_impl(
 ) -> Option<()> {
     let attr = ctx.find_node_at_offset::<ast::Attr>()?;
 
-    let attr_name = attr
+    let has_derive = attr
         .syntax()
         .descendants_with_tokens()
         .filter(|t| t.kind() == IDENT)
         .find_map(syntax::NodeOrToken::into_token)
-        .filter(|t| t.text() == "derive")?
-        .text()
-        .clone();
+        .filter(|t| t.text() == "derive")
+        .is_some();
+    if !has_derive {
+        return None;
+    }
 
-    let trait_token =
-        ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;
+    let trait_token = ctx.token_at_offset().find(|t| t.kind() == IDENT && t.text() != "derive")?;
     let trait_path = make::path_unqualified(make::path_segment(make::name_ref(trait_token.text())));
 
     let annotated_name = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
@@ -176,9 +177,9 @@ fn update_attribute(
         .syntax()
         .descendants_with_tokens()
         .filter(|t| t.kind() == IDENT)
-        .filter_map(|t| t.into_token().map(|t| t.text().clone()))
+        .filter_map(|t| t.into_token().map(|t| t.text().to_string()))
         .filter(|t| t != trait_name.text())
-        .collect::<Vec<SmolStr>>();
+        .collect::<Vec<_>>();
     let has_more_derives = !new_attr_input.is_empty();
 
     if has_more_derives {
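Since the `derive` ident can no longer be cloned out of the attribute cheaply, the assist now only checks that it exists and compares the token under the cursor against the literal `"derive"` directly. A rough sketch of that control flow over plain string slices (illustrative names, not the assist's real types):

    // Illustrative only: attribute idents and the cursor token are modelled
    // as plain &str values instead of syntax tokens.
    fn trait_under_cursor<'a>(attr_idents: &[&'a str], cursor_ident: &'a str) -> Option<&'a str> {
        // Bail out unless the attribute actually contains `derive`.
        let has_derive = attr_idents.iter().any(|&ident| ident == "derive");
        if !has_derive {
            return None;
        }
        // Any ident at the cursor other than `derive` itself is the trait name.
        if cursor_ident != "derive" {
            Some(cursor_ident)
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(trait_under_cursor(&["derive", "Debug"], "Debug"), Some("Debug"));
        assert_eq!(trait_under_cursor(&["cfg", "test"], "Debug"), None);
    }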
diff --git a/crates/assists/src/utils.rs b/crates/assists/src/utils.rs
index fc9f83bab..44c35bafa 100644
--- a/crates/assists/src/utils.rs
+++ b/crates/assists/src/utils.rs
@@ -223,7 +223,7 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
             let method = mce.name_ref()?;
             let arg_list = mce.arg_list()?;
 
-            let method = match method.text().as_str() {
+            let method = match method.text() {
                 "is_some" => "is_none",
                 "is_none" => "is_some",
                 "is_ok" => "is_err",
diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs
index eb257579f..b7f1aae8f 100644
--- a/crates/hir_expand/src/builtin_derive.rs
+++ b/crates/hir_expand/src/builtin_derive.rs
@@ -102,7 +102,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
         debug!("name token not found");
         mbe::ExpandError::ConversionError
     })?;
-    let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
+    let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
     let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
     Ok(BasicAdtInfo { name: name_token, type_params })
 }
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index 95d853b6d..d692cec14 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -38,7 +38,7 @@ impl Name {
     }
 
     pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
-        Self::new_text(lt.text().clone())
+        Self::new_text(lt.text().into())
     }
 
     /// Shortcut to create inline plain text name
@@ -47,12 +47,12 @@ impl Name {
     }
 
     /// Resolve a name from the text of token.
-    fn resolve(raw_text: &SmolStr) -> Name {
+    fn resolve(raw_text: &str) -> Name {
         let raw_start = "r#";
-        if raw_text.as_str().starts_with(raw_start) {
+        if raw_text.starts_with(raw_start) {
             Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
         } else {
-            Name::new_text(raw_text.clone())
+            Name::new_text(raw_text.into())
         }
     }
 
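`Name::resolve` keeps its behaviour, a raw identifier like `r#type` resolves to the name `type`; only the parameter type loosens to `&str`. A tiny sketch of the same prefix handling, written with `strip_prefix` instead of the `starts_with` plus slice used above (equivalent, just more compact):

    // Mirrors the `r#` handling in `Name::resolve`, over a plain &str.
    fn strip_raw_prefix(raw_text: &str) -> &str {
        raw_text.strip_prefix("r#").unwrap_or(raw_text)
    }

    fn main() {
        assert_eq!(strip_raw_prefix("r#type"), "type");
        assert_eq!(strip_raw_prefix("foo"), "foo");
    }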
diff --git a/crates/ide/src/display/navigation_target.rs b/crates/ide/src/display/navigation_target.rs
index 00e601244..671aa1373 100644
--- a/crates/ide/src/display/navigation_target.rs
+++ b/crates/ide/src/display/navigation_target.rs
@@ -153,8 +153,7 @@ impl NavigationTarget {
         node: InFile<&dyn ast::NameOwner>,
         kind: SymbolKind,
     ) -> NavigationTarget {
-        let name =
-            node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
+        let name = node.value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
         let focus_range =
             node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
         let frange = node.map(|it| it.syntax()).original_file_range(db);
diff --git a/crates/ide/src/display/short_label.rs b/crates/ide/src/display/short_label.rs
index 990f740b8..b8e4cc181 100644
--- a/crates/ide/src/display/short_label.rs
+++ b/crates/ide/src/display/short_label.rs
@@ -90,7 +90,7 @@ impl ShortLabel for ast::Variant {
 impl ShortLabel for ast::ConstParam {
     fn short_label(&self) -> Option<String> {
         let mut buf = "const ".to_owned();
-        buf.push_str(self.name()?.text().as_str());
+        buf.push_str(self.name()?.text());
         if let Some(type_ref) = self.ty() {
             format_to!(buf, ": {}", type_ref.syntax());
         }
@@ -117,6 +117,6 @@ where
 {
     let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default();
     buf.push_str(label);
-    buf.push_str(node.name()?.text().as_str());
+    buf.push_str(node.name()?.text());
     Some(buf)
 }
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index 17a540972..2d722dee0 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -213,8 +213,8 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange
     let ws_text = ws.text();
     let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
     let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
-    let ws_suffix = &ws_text.as_str()[suffix];
-    let ws_prefix = &ws_text.as_str()[prefix];
+    let ws_suffix = &ws_text[suffix];
+    let ws_prefix = &ws_text[prefix];
     if ws_text.contains('\n') && !ws_suffix.contains('\n') {
         if let Some(node) = ws.next_sibling_or_token() {
             let start = match ws_prefix.rfind('\n') {
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index a2039fcc7..54485fd30 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -411,7 +411,7 @@ fn get_string_representation(expr: &ast::Expr) -> Option<String> {
     match expr {
         ast::Expr::MethodCallExpr(method_call_expr) => {
             let name_ref = method_call_expr.name_ref()?;
-            match name_ref.text().as_str() {
+            match name_ref.text() {
                 "clone" => method_call_expr.receiver().map(|rec| rec.to_string()),
                 name_ref => Some(name_ref.to_owned()),
             }
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index 981467c8d..631bde0f1 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -59,7 +59,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextS
         // The node is either the first or the last in the file
         let suff = &token.text()[TextRange::new(
             offset - token.text_range().start() + TextSize::of('\n'),
-            TextSize::of(token.text().as_str()),
+            TextSize::of(token.text()),
         )];
         let spaces = suff.bytes().take_while(|&b| b == b' ').count();
 
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index a74ca844b..8a9b5ca8c 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -30,7 +30,7 @@ fn is_format_string(string: &ast::String) -> Option<()> {
     let parent = string.syntax().parent();
 
     let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
-    if !matches!(name.text().as_str(), "format_args" | "format_args_nl") {
+    if !matches!(name.text(), "format_args" | "format_args_nl") {
         return None;
     }
 
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 281461493..8cdc3688f 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -116,7 +116,7 @@ pub(super) fn doc_comment(hl: &mut Highlights, node: &SyntaxNode) {
             None => (),
         }
 
-        let line: &str = comment.text().as_str();
+        let line: &str = comment.text();
         let range = comment.syntax().text_range();
 
         let mut pos = TextSize::of(comment.prefix());
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs
index d9875ffef..a8091dbee 100644
--- a/crates/ide_db/src/defs.rs
+++ b/crates/ide_db/src/defs.rs
@@ -343,7 +343,7 @@ impl NameRefClass {
                     hir::AssocItem::TypeAlias(it) => Some(*it),
                     _ => None,
                 })
-                .find(|alias| alias.name(sema.db).to_string() == **name_ref.text())
+                .find(|alias| &alias.name(sema.db).to_string() == name_ref.text())
             {
                 return Some(NameRefClass::Definition(Definition::ModuleDef(
                     ModuleDef::TypeAlias(ty),
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs
index 877d4f1c7..fd4035198 100644
--- a/crates/ide_db/src/helpers/insert_use.rs
+++ b/crates/ide_db/src/helpers/insert_use.rs
@@ -507,7 +507,7 @@ impl ImportGroup {
             PathSegmentKind::SelfKw => ImportGroup::ThisModule,
             PathSegmentKind::SuperKw => ImportGroup::SuperModule,
             PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
-            PathSegmentKind::Name(name) => match name.text().as_str() {
+            PathSegmentKind::Name(name) => match name.text() {
                 "std" => ImportGroup::Std,
                 "core" => ImportGroup::Std,
                 _ => ImportGroup::ExternCrate,
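One ergonomic win of returning `&str` shows up here: it can be used as a match scrutinee against string literal patterns directly, whereas the old `&SmolStr` needed an `.as_str()` first. A compilable sketch of an `ImportGroup`-style match, with the enum and mapping reduced to the variants visible in the hunk above:

    #[derive(Debug, PartialEq)]
    enum ImportGroup {
        Std,
        ExternCrate,
    }

    // `segment_text` plays the role of `name.text()`: a plain &str matches
    // against string literal patterns without any conversion.
    fn import_group(segment_text: &str) -> ImportGroup {
        match segment_text {
            "std" => ImportGroup::Std,
            "core" => ImportGroup::Std,
            _ => ImportGroup::ExternCrate,
        }
    }

    fn main() {
        assert_eq!(import_group("std"), ImportGroup::Std);
        assert_eq!(import_group("serde"), ImportGroup::ExternCrate);
    }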
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs
index 0aa6a0765..500bdfd6b 100644
--- a/crates/ide_db/src/symbol_index.rs
+++ b/crates/ide_db/src/symbol_index.rs
@@ -209,7 +209,7 @@ pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<Fil
     query.search(&buf)
 }
 
-pub fn index_resolve(db: &RootDatabase, name: &SmolStr) -> Vec<FileSymbol> {
+pub fn index_resolve(db: &RootDatabase, name: &str) -> Vec<FileSymbol> {
     let mut query = Query::new(name.to_string());
     query.exact();
     query.limit(4);
@@ -409,7 +409,7 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
     fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
         let name = node.name()?;
         let name_range = name.syntax().text_range();
-        let name = name.text().clone();
+        let name = name.text().into();
         let ptr = SyntaxNodePtr::new(node.syntax());
 
         Some((name, ptr, name_range))
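Changing `index_resolve` to take `&str` instead of `&SmolStr` also widens its callers: anything that derefs to `str` can be passed without conversion. A small sketch of that signature choice (the `RootDatabase` parameter and the real return type are omitted; only the parameter type is the point):

    use smol_str::SmolStr;

    // Stand-in for `index_resolve`: accepting `&str` means a SmolStr, a String
    // or a string literal all work, thanks to deref coercion.
    fn index_resolve(name: &str) -> String {
        format!("query: {}", name)
    }

    fn main() {
        let from_smol = SmolStr::new("Foo");
        let from_string = String::from("Bar");
        assert_eq!(index_resolve(&from_smol), "query: Foo");
        assert_eq!(index_resolve(&from_string), "query: Bar");
        assert_eq!(index_resolve("Baz"), "query: Baz");
    }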
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 51002e7b8..1e1123889 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -507,7 +507,7 @@ impl SrcToken for SynToken {
         }
     }
     fn to_text(&self) -> SmolStr {
-        self.token().text().clone()
+        self.token().text().into()
     }
 }
 
diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs
index 42d313f91..df013bae9 100644
--- a/crates/ssr/src/matching.rs
+++ b/crates/ssr/src/matching.rs
@@ -10,8 +10,11 @@ use hir::Semantics;
 use ide_db::base_db::FileRange;
 use rustc_hash::FxHashMap;
 use std::{cell::Cell, iter::Peekable};
-use syntax::ast::{AstNode, AstToken};
 use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
+use syntax::{
+    ast::{AstNode, AstToken},
+    SmolStr,
+};
 use test_utils::mark;
 
 // Creates a match error. If we're currently attempting to match some code that we thought we were
@@ -398,11 +401,11 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
         code: &SyntaxNode,
     ) -> Result<(), MatchFailed> {
         // Build a map keyed by field name.
-        let mut fields_by_name = FxHashMap::default();
+        let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
         for child in code.children() {
             if let Some(record) = ast::RecordExprField::cast(child.clone()) {
                 if let Some(name) = record.field_name() {
-                    fields_by_name.insert(name.text().clone(), child.clone());
+                    fields_by_name.insert(name.text().into(), child.clone());
                 }
             }
         }
@@ -473,9 +476,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
                     }
                    SyntaxElement::Node(n) => {
                        if let Some(first_token) = n.first_token() {
-                            if Some(first_token.text().as_str())
-                                == next_pattern_token.as_deref()
-                            {
+                            if Some(first_token.text()) == next_pattern_token.as_deref() {
                                if let Some(SyntaxElement::Node(p)) = pattern.next() {
                                    // We have a subtree that starts with the next token in our pattern.
                                    self.attempt_match_token_tree(phase, &p, &n)?;
diff --git a/crates/ssr/src/replacing.rs b/crates/ssr/src/replacing.rs
index 7e7ce37bd..06a94a46c 100644
--- a/crates/ssr/src/replacing.rs
+++ b/crates/ssr/src/replacing.rs
@@ -173,7 +173,7 @@ impl ReplacementRenderer<'_> {
                 );
             }
         } else {
-            self.out.push_str(token.text().as_str());
+            self.out.push_str(token.text());
         }
     }
 
diff --git a/crates/ssr/src/resolving.rs b/crates/ssr/src/resolving.rs
index f5ceb5729..14e5a3b69 100644
--- a/crates/ssr/src/resolving.rs
+++ b/crates/ssr/src/resolving.rs
@@ -228,7 +228,7 @@ impl<'db> ResolutionScope<'db> {
             None,
             |_ty, assoc_item| {
                 let item_name = assoc_item.name(self.scope.db)?;
-                if item_name.to_string().as_str() == name.text().as_str() {
+                if item_name.to_string().as_str() == name.text() {
                     Some(hir::PathResolution::AssocItem(assoc_item))
                 } else {
                     None
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 37d3faa03..165533388 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
 
 [dependencies]
 itertools = "0.10.0"
-rowan = "0.11"
+rowan = { path="../../../rowan" }
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..a25ff655e 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,7 +12,7 @@ use std::marker::PhantomData;
 
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
@@ -54,7 +54,7 @@ pub trait AstToken {
 
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cc1717237..b755c9692 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -495,7 +495,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
             .unwrap()
     }
 
@@ -523,7 +523,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
             .unwrap()
     }
 
@@ -533,7 +533,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
             .unwrap()
     }
 
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..5c8cf900f 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -13,19 +13,19 @@ use crate::{
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 
@@ -34,7 +34,7 @@ impl ast::NameRef {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
 
@@ -121,7 +121,7 @@ impl ast::Attr {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
         }
     }
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..5e07ec7d1 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -156,13 +156,13 @@ impl ast::String {
 
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -190,7 +190,7 @@ impl ast::String {
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
     ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
 
@@ -590,7 +590,7 @@ impl ast::IntNumber {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }
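`IntNumber::value` keeps slicing the suffix off the borrowed text; with `&str` the slice works on the return value of `text()` directly. A reduced sketch of just the suffix-trimming step shown above (radix prefixes and the real suffix detection are out of scope here):

    // Trim a known suffix (e.g. "u32" in "92u32") off a literal's text, the
    // way the hunk above does, then parse the remaining decimal digits.
    fn int_value(text: &str, suffix: Option<&str>) -> Option<u128> {
        let text = match suffix {
            Some(s) => &text[..text.len() - s.len()],
            None => text,
        };
        text.parse().ok()
    }

    fn main() {
        assert_eq!(int_value("92u32", Some("u32")), Some(92));
        assert_eq!(int_value("1234", None), Some(1234));
    }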
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..7694e8834 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -116,7 +116,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {