41 files changed, 527 insertions, 557 deletions
diff --git a/Cargo.lock b/Cargo.lock
index f9c34547e..a19b8339c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1295,9 +1295,9 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
 
 [[package]]
 name = "rowan"
-version = "0.13.0-pre.5"
+version = "0.13.0-pre.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a5fc82ed0b7e7fba157331f0d8f64abd73bced6e7ac2a4dfa0c4cf0ab584e8"
+checksum = "82ccc04e145e9a5ab51b9c12a81d77c4a8250d87a407ab02ac650451141ff00d"
 dependencies = [
 "countme",
 "hashbrown",
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 1682d8bde..93cf6a3d6 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -215,6 +215,7 @@ impl FlycheckActor {
 } => {
 let mut cmd = Command::new(toolchain::cargo());
 cmd.arg(command);
+cmd.current_dir(&self.workspace_root);
 cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
 .arg(self.workspace_root.join("Cargo.toml"));
 
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 7e8a5bca3..d443b124c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -875,6 +875,10 @@ impl Function {
 db.function_data(self.id).is_unsafe()
 }
 
+pub fn is_async(self, db: &dyn HirDatabase) -> bool {
+db.function_data(self.id).is_async()
+}
+
 pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
 let krate = self.module(db).id.krate();
 hir_def::diagnostics::validate_body(db.upcast(), self.id.into(), sink);
diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs
index 8360426f1..98b485b60 100644
--- a/crates/hir_def/src/body.rs
+++ b/crates/hir_def/src/body.rs
@@ -21,8 +21,6 @@ use profile::Count;
 use rustc_hash::FxHashMap;
 use syntax::{ast, AstNode, AstPtr};
 
-pub use lower::LowerCtx;
-
 use crate::{
 attr::{Attrs, RawAttrs},
 db::DefDatabase,
@@ -35,6 +33,8 @@ use crate::{
 UnresolvedMacro,
 };
 
+pub use lower::LowerCtx;
+
 /// A subset of Expander that only deals with cfg attributes. We only need it to
 /// avoid cyclic queries in crate def map during enum processing.
 #[derive(Debug)]
diff --git a/crates/hir_def/src/find_path.rs b/crates/hir_def/src/find_path.rs
index 858e88038..ee52794aa 100644
--- a/crates/hir_def/src/find_path.rs
+++ b/crates/hir_def/src/find_path.rs
@@ -5,10 +5,10 @@ use std::iter;
 use hir_expand::name::{known, AsName, Name};
 use rustc_hash::FxHashSet;
 
-use crate::nameres::DefMap;
 use crate::{
 db::DefDatabase,
 item_scope::ItemInNs,
+nameres::DefMap,
 path::{ModPath, PathKind},
 visibility::Visibility,
 ModuleDefId, ModuleId,
@@ -134,7 +134,16 @@ fn find_path_inner(
 for (name, def_id) in root_def_map.extern_prelude() {
 if item == ItemInNs::Types(*def_id) {
 let name = scope_name.unwrap_or_else(|| name.clone());
-return Some(ModPath::from_segments(PathKind::Plain, vec![name]));
+
+let name_already_occupied_in_type_ns = def_map
+.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| {
+def_map[local_id].scope.get(&name).take_types().filter(|&id| id != *def_id)
+})
+.is_some();
+return Some(ModPath::from_segments(
+if name_already_occupied_in_type_ns { PathKind::Abs } else { PathKind::Plain },
+vec![name],
+));
 }
 }
 
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index af9802144..280c25f11 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -118,6 +118,7 @@ register_builtin! {
 EAGER:
 (compile_error, CompileError) => compile_error_expand,
 (concat, Concat) => concat_expand,
+(concat_idents, ConcatIdents) => concat_idents_expand,
 (include, Include) => include_expand,
 (include_bytes, IncludeBytes) => include_bytes_expand,
 (include_str, IncludeStr) => include_str_expand,
@@ -373,6 +374,28 @@ fn concat_expand(
 ExpandResult { value: Some(ExpandedEager::new(quote!(#text), FragmentKind::Expr)), err }
 }
 
+fn concat_idents_expand(
+_db: &dyn AstDatabase,
+_arg_id: EagerMacroId,
+tt: &tt::Subtree,
+) -> ExpandResult<Option<ExpandedEager>> {
+let mut err = None;
+let mut ident = String::new();
+for (i, t) in tt.token_trees.iter().enumerate() {
+match t {
+tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => {
+ident.push_str(id.text.as_str());
+}
+tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+_ => {
+err.get_or_insert(mbe::ExpandError::UnexpectedToken);
+}
+}
+}
+let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() };
+ExpandResult { value: Some(ExpandedEager::new(quote!(#ident), FragmentKind::Expr)), err }
+}
+
 fn relative_file(
 db: &dyn AstDatabase,
 call_id: MacroCallId,
@@ -794,4 +817,16 @@ mod tests {
 expect![[r#""foor0bar\nfalse""#]],
 );
 }
+
+#[test]
+fn test_concat_idents_expand() {
+check_expansion(
+r##"
+#[rustc_builtin_macro]
+macro_rules! concat_idents {}
+concat_idents!(foo, bar);
+"##,
+expect![[r#"foobar"#]],
+);
+}
 }
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index bcfd3e524..5a5dc9afd 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -212,6 +212,7 @@ pub mod known {
 std_panic,
 stringify,
 concat,
+concat_idents,
 include,
 include_bytes,
 include_str,
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index e9762622f..6ee0529c6 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -1119,6 +1119,7 @@ fn main() {
 (true, false, true) => (),
 (true) => (),
 }
+match (true, false) { (true,) => {} }
 match (0) { () => () }
 match Unresolved::Bar { Unresolved::Baz => () }
 }
diff --git a/crates/hir_ty/src/infer/pat.rs b/crates/hir_ty/src/infer/pat.rs
index aea354cde..60b94a642 100644
--- a/crates/hir_ty/src/infer/pat.rs
+++ b/crates/hir_ty/src/infer/pat.rs
@@ -126,11 +126,12 @@ impl<'a> InferenceContext<'a> {
 _ => &[],
 };
 
-let (pre, post) = match ellipsis {
-Some(idx) => args.split_at(idx),
-None => (&args[..], &[][..]),
+let ((pre, post), n_uncovered_patterns) = match ellipsis {
+Some(idx) => {
+(args.split_at(idx), expectations.len().saturating_sub(args.len()))
+}
+None => ((&args[..], &[][..]), 0),
 };
-let n_uncovered_patterns = expectations.len().saturating_sub(args.len());
 let err_ty = self.err_ty();
 let mut expectations_iter =
 expectations.iter().map(|a| a.assert_ty_ref(&Interner)).chain(repeat(&err_ty));
diff --git a/crates/hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs
index b36e77e91..787647e9f 100644
--- a/crates/hir_ty/src/tests/patterns.rs
+++ b/crates/hir_ty/src/tests/patterns.rs
@@ -732,7 +732,7 @@ fn foo(tuple: (u8, i16, f32)) {
 111..112 'a': u8
 114..115 'b': i16
 124..126 '{}': ()
-136..142 '(a, b)': (u8, i16, f32)
+136..142 '(a, b)': (u8, i16)
 137..138 'a': u8
 140..141 'b': i16
 146..161 '{/*too short*/}': ()
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index cb5a8e19a..320694a17 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -29,7 +29,8 @@ pub(crate) type DocumentationLink = String;
 /// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs)
 pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Definition) -> String {
 let mut cb = broken_link_clone_cb;
-let doc = Parser::new_with_broken_link_callback(markdown, Options::empty(), Some(&mut cb));
+let doc =
+Parser::new_with_broken_link_callback(markdown, Options::ENABLE_TASKLISTS, Some(&mut cb));
 
 let doc = map_links(doc, |target, title: &str| {
 // This check is imperfect, there's some overlap between valid intra-doc links
@@ -64,8 +65,7 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Defi
 pub(crate) fn remove_links(markdown: &str) -> String {
 let mut drop_link = false;
 
-let mut opts = Options::empty();
-opts.insert(Options::ENABLE_FOOTNOTES);
+let opts = Options::ENABLE_TASKLISTS | Options::ENABLE_FOOTNOTES;
 
 let mut cb = |_: BrokenLink| {
 let empty = InlineStr::try_from("").unwrap();
@@ -123,7 +123,7 @@ pub(crate) fn extract_definitions_from_markdown(
 ) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
 Parser::new_with_broken_link_callback(
 markdown,
-Options::empty(),
+Options::ENABLE_TASKLISTS,
 Some(&mut broken_link_clone_cb),
 )
 .into_offset_iter()
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 261dcc255..011c8cc55 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -19,14 +19,10 @@ use syntax::{
 pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
 const BRACES: &[SyntaxKind] =
 &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
-let (brace_token, brace_idx) = file
-.syntax()
-.token_at_offset(offset)
-.filter_map(|node| {
-let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
-Some((node, idx))
-})
-.next()?;
+let (brace_token, brace_idx) = file.syntax().token_at_offset(offset).find_map(|node| {
+let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
+Some((node, idx))
+})?;
 let parent = brace_token.parent()?;
 if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
 cov_mark::hit!(pipes_not_braces);
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 99365c8a7..9b1f48044 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -1,6 +1,8 @@
 use hir::Semantics;
-use ide_db::base_db::{CrateId, FileId, FilePosition};
-use ide_db::RootDatabase;
+use ide_db::{
+base_db::{CrateId, FileId, FilePosition},
+RootDatabase,
+};
 use itertools::Itertools;
 use syntax::{
 algo::find_node_at_offset,
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index b586dcc17..baed8e217 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -227,8 +227,8 @@ pub(super) fn element(
 k if k.is_keyword() => {
 let h = Highlight::new(HlTag::Keyword);
 match k {
-T![await]
-| T![break]
+T![await] => h | HlMod::Async | HlMod::ControlFlow,
+T![break]
 | T![continue]
 | T![else]
 | T![if]
@@ -255,6 +255,7 @@ pub(super) fn element(
 })
 .map(|modifier| h | modifier)
 .unwrap_or(h),
+T![async] => h | HlMod::Async,
 _ => h,
 }
 }
@@ -310,6 +311,9 @@ fn highlight_def(db: &RootDatabase, def: Definition) -> Highlight {
 if func.is_unsafe(db) {
 h |= HlMod::Unsafe;
 }
+if func.is_async(db) {
+h |= HlMod::Async;
+}
 return h;
 }
 hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HlTag::Symbol(SymbolKind::Struct),
@@ -409,6 +413,9 @@ fn highlight_method_call(
 if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) {
 h |= HlMod::Unsafe;
 }
+if func.is_async(sema.db) {
+h |= HlMod::Async;
+}
 if func.as_assoc_item(sema.db).and_then(|it| it.containing_trait(sema.db)).is_some() {
 h |= HlMod::Trait
 }
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs
index a304b3250..f4a2e7506 100644
--- a/crates/ide/src/syntax_highlighting/tags.rs
+++ b/crates/ide/src/syntax_highlighting/tags.rs
@@ -65,6 +65,8 @@ pub enum HlMod {
 Static,
 /// Used for items in traits and trait impls.
 Trait,
+/// Used with keywords like `async` and `await`.
+Async,
 // Keep this last!
 /// Used for unsafe functions, mutable statics, union accesses and unsafe operations.
 Unsafe,
@@ -186,6 +188,7 @@ impl HlMod {
 HlMod::Mutable,
 HlMod::Static,
 HlMod::Trait,
+HlMod::Async,
 HlMod::Unsafe,
 ];
 
@@ -203,6 +206,7 @@ impl HlMod {
 HlMod::Mutable => "mutable",
 HlMod::Static => "static",
 HlMod::Trait => "trait",
+HlMod::Async => "async",
 HlMod::Unsafe => "unsafe",
 }
 }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 8d83ba206..921a956e6 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -37,13 +37,25 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 
 .unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
 </style>
-<pre><code><span class="comment documentation">/// ```</span>
+<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
+<span class="comment documentation">//! ```</span>
+<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
+<span class="comment documentation">//! ```</span>
+
+<span class="comment documentation">/// ```</span>
 <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
 <span class="comment documentation">/// ```</span>
 <span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
 <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
 <span class="brace">}</span>
 
+<span class="comment documentation">/// This is an impl with a code block.</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// ```</span>
+<span class="comment documentation">/// </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
+<span class="comment documentation">///</span>
+<span class="comment documentation">/// </span><span class="brace injected">}</span>
+<span class="comment documentation">/// ```</span>
 <span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
 <span class="comment documentation">/// ```</span>
 <span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlighting.html
index df4192194..33bc6b0f3 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlighting.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlighting.html
@@ -234,4 +234,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 <span class="variable declaration">Nope</span> <span class="operator">=></span> <span class="variable">Nope</span><span class="comma">,</span>
 <span class="brace">}</span>
 <span class="brace">}</span>
+<span class="brace">}</span>
+
+<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function declaration async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+<span class="keyword">let</span> <span class="variable declaration">song</span> <span class="operator">=</span> <span class="unresolved_reference">learn_song</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword control async">await</span><span class="semicolon">;</span>
+<span class="unresolved_reference">sing_song</span><span class="parenthesis">(</span><span class="variable consuming">song</span><span class="parenthesis">)</span><span class="operator">.</span><span class="keyword control async">await</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
+<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function declaration async">async_main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
+<span class="keyword">let</span> <span class="variable declaration">f1</span> <span class="operator">=</span> <span class="function async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="keyword">let</span> <span class="variable declaration">f2</span> <span class="operator">=</span> <span class="unresolved_reference">dance</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+futures::<span class="macro">join!</span><span class="parenthesis">(</span>f1<span class="comma">,</span> f2<span class="parenthesis">)</span><span class="semicolon">;</span>
 <span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index b6e952b08..95408dfb2 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -208,6 +208,17 @@ impl<T> Option<T> {
 }
 }
 }
+
+async fn learn_and_sing() {
+let song = learn_song().await;
+sing_song(song).await;
+}
+
+async fn async_main() {
+let f1 = learn_and_sing();
+let f2 = dance();
+futures::join!(f1, f2);
+}
 "#
 .trim(),
 expect_file!["./test_data/highlighting.html"],
@@ -513,6 +524,11 @@ fn main() {
 fn test_highlight_doc_comment() {
 check_highlighting(
 r#"
+//! This is a module to test doc injection.
+//! ```
+//! fn test() {}
+//! ```
+
 /// ```
 /// let _ = "early doctests should not go boom";
 /// ```
@@ -520,6 +536,13 @@ struct Foo {
 bar: bool,
 }
 
+/// This is an impl with a code block.
+///
+/// ```
+/// fn foo() {
+///
+/// }
+/// ```
 impl Foo {
 /// ```
 /// let _ = "Call me
diff --git a/crates/ide_assists/src/handlers/add_missing_impl_members.rs b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
index 0148635f9..8225ae22c 100644
--- a/crates/ide_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
@@ -64,7 +64,6 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext) -
 // impl Trait for () {
 // type X = ();
 // fn foo(&self) {}$0
-//
 // }
 // ```
 // ->
@@ -195,6 +194,7 @@ impl Foo for S {
 fn baz(&self) {
 todo!()
 }
+
 }"#,
 );
 }
@@ -231,6 +231,7 @@ impl Foo for S {
 fn foo(&self) {
 ${0:todo!()}
 }
+
 }"#,
 );
 }
diff --git a/crates/ide_assists/src/handlers/auto_import.rs b/crates/ide_assists/src/handlers/auto_import.rs
index a454a2af3..506cc292c 100644
--- a/crates/ide_assists/src/handlers/auto_import.rs
+++ b/crates/ide_assists/src/handlers/auto_import.rs
@@ -969,4 +969,30 @@ mod bar {
 "#,
 );
 }
+
+#[test]
+fn uses_abs_path_with_extern_crate_clash() {
+check_assist(
+auto_import,
+r#"
+//- /main.rs crate:main deps:foo
+mod foo {}
+
+const _: () = {
+Foo$0
+};
+//- /foo.rs crate:foo
+pub struct Foo
+"#,
+r#"
+use ::foo::Foo;
+
+mod foo {}
+
+const _: () = {
+Foo
+};
+"#,
+);
+}
 }
diff --git a/crates/ide_assists/src/handlers/generate_new.rs b/crates/ide_assists/src/handlers/generate_new.rs
index 8ce5930b7..959a1f86c 100644
--- a/crates/ide_assists/src/handlers/generate_new.rs
+++ b/crates/ide_assists/src/handlers/generate_new.rs
@@ -1,4 +1,3 @@
-use ast::Adt;
 use itertools::Itertools;
 use stdx::format_to;
 use syntax::ast::{self, AstNode, NameOwner, StructKind, VisibilityOwner};
@@ -37,7 +36,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
 };
 
 // Return early if we've found an existing new fn
-let impl_def = find_struct_impl(&ctx, &Adt::Struct(strukt.clone()), "new")?;
+let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
 
 let target = strukt.syntax().text_range();
 acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
@@ -60,7 +59,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
 let start_offset = impl_def
 .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf))
 .unwrap_or_else(|| {
-buf = generate_impl_text(&Adt::Struct(strukt.clone()), &buf);
+buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
 strukt.syntax().text_range().end()
 });
 
@@ -81,101 +80,132 @@ mod tests {
 use super::*;
 
 #[test]
-#[rustfmt::skip]
 fn test_generate_new() {
-// Check output of generation
 check_assist(
 generate_new,
-"struct Foo {$0}",
-"struct Foo {}
+r#"
+struct Foo {$0}
+"#,
+r#"
+struct Foo {}
 
 impl Foo {
 fn $0new() -> Self { Self { } }
-}",
+}
+"#,
 );
 check_assist(
 generate_new,
-"struct Foo<T: Clone> {$0}",
-"struct Foo<T: Clone> {}
+r#"
+struct Foo<T: Clone> {$0}
+"#,
+r#"
+struct Foo<T: Clone> {}
 
 impl<T: Clone> Foo<T> {
 fn $0new() -> Self { Self { } }
-}",
+}
+"#,
 );
 check_assist(
 generate_new,
-"struct Foo<'a, T: Foo<'a>> {$0}",
-"struct Foo<'a, T: Foo<'a>> {}
+r#"
+struct Foo<'a, T: Foo<'a>> {$0}
+"#,
+r#"
+struct Foo<'a, T: Foo<'a>> {}
 
 impl<'a, T: Foo<'a>> Foo<'a, T> {
 fn $0new() -> Self { Self { } }
-}",
+}
+"#,
 );
 check_assist(
 generate_new,
-"struct Foo { baz: String $0}",
-"struct Foo { baz: String }
+r#"
+struct Foo { baz: String $0}
+"#,
+r#"
+struct Foo { baz: String }
 
 impl Foo {
 fn $0new(baz: String) -> Self { Self { baz } }
-}",
+}
+"#,
 );
 check_assist(
 generate_new,
-"struct Foo { baz: String, qux: Vec<i32> $0}",
-"struct Foo { baz: String, qux: Vec<i32> }
+r#"
+struct Foo { baz: String, qux: Vec<i32> $0}
+"#,
+r#"
+struct Foo { baz: String, qux: Vec<i32> }
 
 impl Foo {
 fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
-}",
+}
+"#,
 );
+}
 
-// Check that visibility modifiers don't get brought in for fields
+#[test]
+fn check_that_visibility_modifiers_dont_get_brought_in() {
 check_assist(
 generate_new,
-"struct Foo { pub baz: String, pub qux: Vec<i32> $0}",
-"struct Foo { pub baz: String, pub qux: Vec<i32> }
+r#"
+struct Foo { pub baz: String, pub qux: Vec<i32> $0}
+"#,
+r#"
+struct Foo { pub baz: String, pub qux: Vec<i32> }
 
 impl Foo {
 fn $0new(baz: String, qux: Vec<i32>) -> Self { Self { baz, qux } }
-}",
+}
+"#,
 );
+}
 
-// Check that it reuses existing impls
+#[test]
+fn check_it_reuses_existing_impls() {
 check_assist(
 generate_new,
-"struct Foo {$0}
+r#"
+struct Foo {$0}
 
 impl Foo {}
-",
-"struct Foo {}
+"#,
+r#"
+struct Foo {}
 
 impl Foo {
 fn $0new() -> Self { Self { } }
 }
-",
+"#,
 );
 check_assist(
 generate_new,
-"struct Foo {$0}
+r#"
+struct Foo {$0}
 
 impl Foo {
 fn qux(&self) {}
 }
-",
-"struct Foo {}
+"#,
+r#"
+struct Foo {}
 
 impl Foo {
 fn $0new() -> Self { Self { } }
 
 fn qux(&self) {}
 }
-",
+"#,
 );
 
 check_assist(
 generate_new,
-"struct Foo {$0}
+r#"
+struct Foo {$0}
 
 impl Foo {
 fn qux(&self) {}
@@ -183,8 +213,9 @@ impl Foo {
 5
 }
 }
-",
-"struct Foo {}
+"#,
+r#"
+struct Foo {}
 
 impl Foo {
 fn $0new() -> Self { Self { } }
@@ -194,27 +225,37 @@ impl Foo {
 5
 }
 }
-",
+"#,
 );
+}
 
-// Check visibility of new fn based on struct
+#[test]
+fn check_visibility_of_new_fn_based_on_struct() {
 check_assist(
 generate_new,
-"pub struct Foo {$0}",
-"pub struct Foo {}
+r#"
+pub struct Foo {$0}
+"#,
+r#"
+pub struct Foo {}
 
 impl Foo {
 pub fn $0new() -> Self { Self { } }
-}",
+}
+"#,
 );
 check_assist(
 generate_new,
-"pub(crate) struct Foo {$0}",
-"pub(crate) struct Foo {}
+r#"
+pub(crate) struct Foo {$0}
+"#,
+r#"
+pub(crate) struct Foo {}
 
 impl Foo {
 pub(crate) fn $0new() -> Self { Self { } }
-}",
+}
+"#,
 );
 }
 
@@ -222,26 +263,28 @@ impl Foo {
 fn generate_new_not_applicable_if_fn_exists() {
 check_assist_not_applicable(
 generate_new,
-"
+r#"
 struct Foo {$0}
 
 impl Foo {
 fn new() -> Self {
 Self
 }
-}",
+}
+"#,
 );
 
 check_assist_not_applicable(
 generate_new,
-"
+r#"
 struct Foo {$0}
 
 impl Foo {
 fn New() -> Self {
 Self
 }
-}",
+}
+"#,
 );
 }
 
@@ -249,12 +292,12 @@ impl Foo {
 fn generate_new_target() {
 check_assist_target(
 generate_new,
-"
+r#"
 struct SomeThingIrrelevant;
 /// Has a lifetime parameter
 struct Foo<'a, T: Foo<'a>> {$0}
 struct EvenMoreIrrelevant;
-",
+"#,
 "/// Has a lifetime parameter
 struct Foo<'a, T: Foo<'a>> {}",
 );
@@ -264,7 +307,7 @@ struct Foo<'a, T: Foo<'a>> {}",
 fn test_unrelated_new() {
 check_assist(
 generate_new,
-r##"
+r#"
 pub struct AstId<N: AstNode> {
 file_id: HirFileId,
 file_ast_id: FileAstId<N>,
@@ -285,8 +328,9 @@ impl<T> Source<T> {
 pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
 Source { file_id: self.file_id, ast: f(self.ast) }
 }
-}"##,
-r##"
+}
+"#,
+r#"
 pub struct AstId<N: AstNode> {
 file_id: HirFileId,
 file_ast_id: FileAstId<N>,
@@ -309,7 +353,8 @@ impl<T> Source<T> {
 pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
 Source { file_id: self.file_id, ast: f(self.ast) }
 }
-}"##,
+}
+"#,
 );
 }
 }
diff --git a/crates/ide_assists/src/handlers/move_module_to_file.rs b/crates/ide_assists/src/handlers/move_module_to_file.rs
index 6e685b4b2..93f702c55 100644
--- a/crates/ide_assists/src/handlers/move_module_to_file.rs
+++ b/crates/ide_assists/src/handlers/move_module_to_file.rs
@@ -1,4 +1,4 @@
-use ast::{edit::IndentLevel, VisibilityOwner};
+use ast::edit::IndentLevel;
 use ide_db::base_db::AnchoredPathBuf;
 use stdx::format_to;
 use syntax::{
@@ -60,12 +60,18 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext) -> Opt
 };
 
 let mut buf = String::new();
-if let Some(v) = module_ast.visibility() {
-format_to!(buf, "{} ", v);
-}
 format_to!(buf, "mod {};", module_name);
 
-builder.replace(module_ast.syntax().text_range(), buf);
+let replacement_start = if let Some(mod_token) = module_ast.mod_token() {
+mod_token.text_range().start()
+} else {
+module_ast.syntax().text_range().start()
+};
+
+builder.replace(
+TextRange::new(replacement_start, module_ast.syntax().text_range().end()),
+buf,
+);
 
 let dst = AnchoredPathBuf { anchor: ctx.frange.file_id, path };
 builder.create_file(dst, contents);
@@ -184,4 +190,26 @@ pub(crate) mod tests;
 cov_mark::check!(available_before_curly);
 check_assist_not_applicable(move_module_to_file, r#"mod m { $0 }"#);
 }
+
+#[test]
+fn keep_outer_comments_and_attributes() {
+check_assist(
+move_module_to_file,
+r#"
+/// doc comment
+#[attribute]
+mod $0tests {
+#[test] fn t() {}
+}
+"#,
+r#"
+//- /main.rs
+/// doc comment
+#[attribute]
+mod tests;
+//- /tests.rs
+#[test] fn t() {}
+"#,
+);
+}
 }
diff --git a/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs b/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs
index 0fec961b4..a3bfa221c 100644
--- a/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs
+++ b/crates/ide_assists/src/handlers/replace_unwrap_with_match.rs
@@ -17,7 +17,7 @@ use ide_db::ty_filter::TryEnum;
 
 // Assist: replace_unwrap_with_match
 //
-// Replaces `unwrap` a `match` expression. Works for Result and Option.
+// Replaces `unwrap` with a `match` expression. Works for Result and Option.
 //
 // ```
 // enum Result<T, E> { Ok(T), Err(E) }
diff --git a/crates/ide_assists/src/tests/generated.rs b/crates/ide_assists/src/tests/generated.rs
index 49c1a9776..4406406a2 100644
--- a/crates/ide_assists/src/tests/generated.rs
+++ b/crates/ide_assists/src/tests/generated.rs
@@ -50,7 +50,6 @@ trait Trait {
 impl Trait for () {
 type X = ();
 fn foo(&self) {}$0
-
 }
 "#####,
 r#####"
diff --git a/crates/ide_assists/src/utils.rs b/crates/ide_assists/src/utils.rs
index 0dcf20c61..fc7caee04 100644
--- a/crates/ide_assists/src/utils.rs
+++ b/crates/ide_assists/src/utils.rs
@@ -17,7 +17,7 @@ use syntax::{
 ast::AttrsOwner,
 ast::NameOwner,
 ast::{self, edit, make, ArgListOwner, GenericParamsOwner},
-AstNode, Direction, SmolStr,
+ted, AstNode, Direction, SmolStr,
 SyntaxKind::*,
 SyntaxNode, TextSize, T,
 };
@@ -128,43 +128,43 @@ pub fn add_trait_assoc_items_to_impl(
 sema: &hir::Semantics<ide_db::RootDatabase>,
 items: Vec<ast::AssocItem>,
 trait_: hir::Trait,
-impl_def: ast::Impl,
+impl_: ast::Impl,
 target_scope: hir::SemanticsScope,
 ) -> (ast::Impl, ast::AssocItem) {
-let impl_item_list = impl_def.assoc_item_list().unwrap_or_else(make::assoc_item_list);
-
-let n_existing_items = impl_item_list.assoc_items().count();
 let source_scope = sema.scope_for_def(trait_);
 let ast_transform = QualifyPaths::new(&target_scope, &source_scope)
-.or(SubstituteTypeParams::for_trait_impl(&source_scope, trait_, impl_def.clone()));
+.or(SubstituteTypeParams::for_trait_impl(&source_scope, trait_, impl_.clone()));
 
 let items = items
 .into_iter()
 .map(|it| it.clone_for_update())
 .inspect(|it| ast_transform::apply(&*ast_transform, it))
-.map(|it| match it {
-ast::AssocItem::Fn(def) => ast::AssocItem::Fn(add_body(def)),
-ast::AssocItem::TypeAlias(def) => ast::AssocItem::TypeAlias(def.remove_bounds()),
-_ => it,
-})
-.map(|it| edit::remove_attrs_and_docs(&it));
-
-let new_impl_item_list = impl_item_list.append_items(items);
-let new_impl_def = impl_def.with_assoc_item_list(new_impl_item_list);
-let first_new_item =
-new_impl_def.assoc_item_list().unwrap().assoc_items().nth(n_existing_items).unwrap();
-return (new_impl_def, first_new_item);
-
-fn add_body(fn_def: ast::Fn) -> ast::Fn {
-match fn_def.body() {
-Some(_) => fn_def,
-None => {
+.map(|it| edit::remove_attrs_and_docs(&it).clone_subtree().clone_for_update());
+
+let res = impl_.clone_for_update();
+
+let assoc_item_list = res.get_or_create_assoc_item_list();
+let mut first_item = None;
+for item in items {
+first_item.get_or_insert_with(|| item.clone());
+match &item {
+ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
 let body = make::block_expr(None, Some(make::ext::expr_todo()))
 .indent(edit::IndentLevel(1));
-fn_def.with_body(body)
+ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
 }
+ast::AssocItem::TypeAlias(type_alias) => {
+if let Some(type_bound_list) = type_alias.type_bound_list() {
+type_bound_list.remove()
+}
+}
+_ => {}
 }
+
+assoc_item_list.add_item(item)
 }
+
+(res, first_item.unwrap())
 }
 
 #[derive(Clone, Copy, Debug)]
diff --git a/crates/ide_completion/src/completions/keyword.rs b/crates/ide_completion/src/completions/keyword.rs
index b635e0ca3..61b667104 100644
--- a/crates/ide_completion/src/completions/keyword.rs
+++ b/crates/ide_completion/src/completions/keyword.rs
@@ -9,22 +9,21 @@ use crate::{CompletionContext, CompletionItem, CompletionItemKind, CompletionKin
 pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionContext) {
 // complete keyword "crate" in use stmt
 let source_range = ctx.source_range();
+let kw_completion = move |text: &str| {
+let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, text);
+item.kind(CompletionItemKind::Keyword).insert_text(text);
+item
+};
 
 if ctx.use_item_syntax.is_some() {
 if ctx.path_qual.is_none() {
-let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, "crate::");
-item.kind(CompletionItemKind::Keyword).insert_text("crate::");
-item.add_to(acc);
+kw_completion("crate::").add_to(acc);
 }
-let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, "self");
-item.kind(CompletionItemKind::Keyword);
-item.add_to(acc);
+kw_completion("self").add_to(acc);
 if iter::successors(ctx.path_qual.clone(), |p| p.qualifier())
 .all(|p| p.segment().and_then(|s| s.super_token()).is_some())
 {
-let mut item = CompletionItem::new(CompletionKind::Keyword, source_range, "super::");
-item.kind(CompletionItemKind::Keyword).insert_text("super::");
-item.add_to(acc);
+kw_completion("super::").add_to(acc);
 }
 }
 
@@ -32,9 +31,8 @@ pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
 if let Some(receiver) = &ctx.dot_receiver {
 if let Some(ty) = ctx.sema.type_of_expr(receiver) {
 if ty.impls_future(ctx.db) {
-let mut item =
-CompletionItem::new(CompletionKind::Keyword, ctx.source_range(), "await");
-item.kind(CompletionItemKind::Keyword).detail("expr.await").insert_text("await");
+let mut item = kw_completion("await");
+item.detail("expr.await");
 item.add_to(acc);
 }
 };
diff --git a/crates/ide_db/src/helpers/merge_imports.rs b/crates/ide_db/src/helpers/merge_imports.rs index af2a51a4d..8fb40e837 100644 --- a/crates/ide_db/src/helpers/merge_imports.rs +++ b/crates/ide_db/src/helpers/merge_imports.rs | |||
@@ -2,8 +2,9 @@ | |||
2 | use std::cmp::Ordering; | 2 | use std::cmp::Ordering; |
3 | 3 | ||
4 | use itertools::{EitherOrBoth, Itertools}; | 4 | use itertools::{EitherOrBoth, Itertools}; |
5 | use syntax::ast::{ | 5 | use syntax::{ |
6 | self, edit::AstNodeEdit, make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner, | 6 | ast::{self, make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner}, |
7 | ted, | ||
7 | }; | 8 | }; |
8 | 9 | ||
9 | /// What type of merges are allowed. | 10 | /// What type of merges are allowed. |
@@ -41,10 +42,12 @@ pub fn try_merge_imports( | |||
41 | return None; | 42 | return None; |
42 | } | 43 | } |
43 | 44 | ||
45 | let lhs = lhs.clone_subtree().clone_for_update(); | ||
44 | let lhs_tree = lhs.use_tree()?; | 46 | let lhs_tree = lhs.use_tree()?; |
45 | let rhs_tree = rhs.use_tree()?; | 47 | let rhs_tree = rhs.use_tree()?; |
46 | let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?; | 48 | let merged = try_merge_trees(&lhs_tree, &rhs_tree, merge_behavior)?; |
47 | Some(lhs.with_use_tree(merged).clone_for_update()) | 49 | ted::replace(lhs_tree.syntax(), merged.syntax()); |
50 | Some(lhs) | ||
48 | } | 51 | } |
49 | 52 | ||
50 | pub fn try_merge_trees( | 53 | pub fn try_merge_trees( |
@@ -65,7 +68,7 @@ pub fn try_merge_trees( | |||
65 | } else { | 68 | } else { |
66 | (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix)) | 69 | (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix)) |
67 | }; | 70 | }; |
68 | recursive_merge(&lhs, &rhs, merge) | 71 | recursive_merge(&lhs, &rhs, merge).map(|it| it.clone_for_update()) |
69 | } | 72 | } |
70 | 73 | ||
71 | /// Recursively "zips" together lhs and rhs. | 74 | /// Recursively "zips" together lhs and rhs. |
@@ -78,7 +81,8 @@ fn recursive_merge( | |||
78 | .use_tree_list() | 81 | .use_tree_list() |
79 | .into_iter() | 82 | .into_iter() |
80 | .flat_map(|list| list.use_trees()) | 83 | .flat_map(|list| list.use_trees()) |
81 | // we use Option here to early return from this function(this is not the same as a `filter` op) | 84 | // We use Option here to early return from this function(this is not the |
85 | // same as a `filter` op). | ||
82 | .map(|tree| match merge.is_tree_allowed(&tree) { | 86 | .map(|tree| match merge.is_tree_allowed(&tree) { |
83 | true => Some(tree), | 87 | true => Some(tree), |
84 | false => None, | 88 | false => None, |
@@ -111,8 +115,10 @@ fn recursive_merge( | |||
111 | let tree_is_self = |tree: ast::UseTree| { | 115 | let tree_is_self = |tree: ast::UseTree| { |
112 | tree.path().as_ref().map(path_is_self).unwrap_or(false) | 116 | tree.path().as_ref().map(path_is_self).unwrap_or(false) |
113 | }; | 117 | }; |
114 | // check if only one of the two trees has a tree list, and whether that then contains `self` or not. | 118 | // Check if only one of the two trees has a tree list, and |
115 | // If this is the case we can skip this iteration since the path without the list is already included in the other one via `self` | 119 | // whether that then contains `self` or not. If this is the |
120 | // case we can skip this iteration since the path without | ||
121 | // the list is already included in the other one via `self`. | ||
116 | let tree_contains_self = |tree: &ast::UseTree| { | 122 | let tree_contains_self = |tree: &ast::UseTree| { |
117 | tree.use_tree_list() | 123 | tree.use_tree_list() |
118 | .map(|tree_list| tree_list.use_trees().any(tree_is_self)) | 124 | .map(|tree_list| tree_list.use_trees().any(tree_is_self)) |
@@ -127,9 +133,11 @@ fn recursive_merge( | |||
127 | _ => (), | 133 | _ => (), |
128 | } | 134 | } |
129 | 135 | ||
130 | // glob imports arent part of the use-tree lists so we need to special handle them here as well | 136 | // Glob imports aren't part of the use-tree lists so we need |
131 | // this special handling is only required for when we merge a module import into a glob import of said module | 137 | // to special handle them here as well. This special handling |
132 | // see the `merge_self_glob` or `merge_mod_into_glob` tests | 138 | // is only required for when we merge a module import into a |
139 | // glob import of said module, see the `merge_self_glob` or | ||
140 | // `merge_mod_into_glob` tests. | ||
133 | if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() { | 141 | if lhs_t.star_token().is_some() || rhs_t.star_token().is_some() { |
134 | *lhs_t = make::use_tree( | 142 | *lhs_t = make::use_tree( |
135 | make::path_unqualified(make::path_segment_self()), | 143 | make::path_unqualified(make::path_segment_self()), |
@@ -165,11 +173,11 @@ fn recursive_merge( | |||
165 | } | 173 | } |
166 | } | 174 | } |
167 | 175 | ||
168 | Some(if let Some(old) = lhs.use_tree_list() { | 176 | let lhs = lhs.clone_subtree().clone_for_update(); |
169 | lhs.replace_descendant(old, make::use_tree_list(use_trees)).clone_for_update() | 177 | if let Some(old) = lhs.use_tree_list() { |
170 | } else { | 178 | ted::replace(old.syntax(), make::use_tree_list(use_trees).syntax().clone_for_update()); |
171 | lhs.clone() | 179 | } |
172 | }) | 180 | ast::UseTree::cast(lhs.syntax().clone_subtree()) |
173 | } | 181 | } |
174 | 182 | ||
175 | /// Traverses both paths until they differ, returning the common prefix of both. | 183 | /// Traverses both paths until they differ, returning the common prefix of both. |
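
Taken together, the `merge_imports` changes swap the old copy-on-write helpers for in-place editing: `lhs` is first detached with `clone_subtree().clone_for_update()`, and the merged tree is spliced in with `ted::replace`, so the caller's originals stay untouched. A minimal sketch of the caller's side (the input strings and the `MergeBehavior::Full` variant name are illustrative assumptions, not taken from this diff):

```rust
use ide_db::helpers::merge_imports::{try_merge_imports, MergeBehavior};
use syntax::{ast, AstNode, SourceFile};

fn first_use(text: &str) -> ast::Use {
    let file = SourceFile::parse(text).tree();
    file.syntax().descendants().find_map(ast::Use::cast).unwrap()
}

fn main() {
    let lhs = first_use("use foo::bar;");
    let rhs = first_use("use foo::baz;");
    // The result is a freshly built, already-mutable tree; `lhs` and `rhs`
    // themselves are not modified in place.
    if let Some(merged) = try_merge_imports(&lhs, &rhs, MergeBehavior::Full) {
        // Expected to read roughly `use foo::{bar, baz};`.
        println!("{}", merged.syntax());
    }
}
```
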
diff --git a/crates/project_model/src/build_data.rs b/crates/project_model/src/build_data.rs index 7b88dca63..3aa546980 100644 --- a/crates/project_model/src/build_data.rs +++ b/crates/project_model/src/build_data.rs | |||
@@ -143,6 +143,7 @@ impl WorkspaceBuildData { | |||
143 | cmd.env("RA_RUSTC_WRAPPER", "1"); | 143 | cmd.env("RA_RUSTC_WRAPPER", "1"); |
144 | } | 144 | } |
145 | 145 | ||
146 | cmd.current_dir(cargo_toml.parent().unwrap()); | ||
146 | cmd.args(&["check", "--quiet", "--workspace", "--message-format=json", "--manifest-path"]) | 147 | cmd.args(&["check", "--quiet", "--workspace", "--message-format=json", "--manifest-path"]) |
147 | .arg(cargo_toml.as_ref()); | 148 | .arg(cargo_toml.as_ref()); |
148 | 149 | ||
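
This mirrors the `flycheck` change earlier in the diff: `cargo` is now spawned from the workspace root rather than from whatever directory rust-analyzer happens to run in, so behaviour that depends on the working directory (for example a project-local `.cargo/config.toml`) matches a manual `cargo check`. A rough sketch of the shared pattern, with a placeholder path:

```rust
use std::process::Command;

fn main() {
    // The workspace path here is a placeholder for illustration.
    let workspace_root = std::path::Path::new("/path/to/workspace");
    // Set the working directory explicitly, in addition to passing
    // --manifest-path, before spawning cargo.
    let mut cmd = Command::new("cargo");
    cmd.current_dir(workspace_root)
        .args(&["check", "--quiet", "--workspace", "--message-format=json", "--manifest-path"])
        .arg(workspace_root.join("Cargo.toml"));
}
```
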
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 56de9681c..ba2790acb 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs | |||
@@ -123,7 +123,7 @@ fn integrated_completion_benchmark() { | |||
123 | }; | 123 | }; |
124 | 124 | ||
125 | { | 125 | { |
126 | let _it = stdx::timeit("unqualified path completion"); | 126 | let _p = profile::span("unqualified path completion"); |
127 | let _span = profile::cpu_span(); | 127 | let _span = profile::cpu_span(); |
128 | let analysis = host.analysis(); | 128 | let analysis = host.analysis(); |
129 | let config = CompletionConfig { | 129 | let config = CompletionConfig { |
@@ -156,7 +156,7 @@ fn integrated_completion_benchmark() { | |||
156 | }; | 156 | }; |
157 | 157 | ||
158 | { | 158 | { |
159 | let _it = stdx::timeit("dot completion"); | 159 | let _p = profile::span("dot completion"); |
160 | let _span = profile::cpu_span(); | 160 | let _span = profile::cpu_span(); |
161 | let analysis = host.analysis(); | 161 | let analysis = host.analysis(); |
162 | let config = CompletionConfig { | 162 | let config = CompletionConfig { |
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index ecab89b2a..c9d38693e 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs | |||
@@ -91,6 +91,7 @@ define_semantic_token_modifiers![ | |||
91 | (INJECTED, "injected"), | 91 | (INJECTED, "injected"), |
92 | (MUTABLE, "mutable"), | 92 | (MUTABLE, "mutable"), |
93 | (CONSUMING, "consuming"), | 93 | (CONSUMING, "consuming"), |
94 | (ASYNC, "async"), | ||
94 | (UNSAFE, "unsafe"), | 95 | (UNSAFE, "unsafe"), |
95 | (ATTRIBUTE_MODIFIER, "attribute"), | 96 | (ATTRIBUTE_MODIFIER, "attribute"), |
96 | (TRAIT_MODIFIER, "trait"), | 97 | (TRAIT_MODIFIER, "trait"), |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index ecf6fd12f..8ce4aa242 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -496,6 +496,7 @@ fn semantic_token_type_and_modifiers( | |||
496 | HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW, | 496 | HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW, |
497 | HlMod::Mutable => semantic_tokens::MUTABLE, | 497 | HlMod::Mutable => semantic_tokens::MUTABLE, |
498 | HlMod::Consuming => semantic_tokens::CONSUMING, | 498 | HlMod::Consuming => semantic_tokens::CONSUMING, |
499 | HlMod::Async => semantic_tokens::ASYNC, | ||
499 | HlMod::Unsafe => semantic_tokens::UNSAFE, | 500 | HlMod::Unsafe => semantic_tokens::UNSAFE, |
500 | HlMod::Callable => semantic_tokens::CALLABLE, | 501 | HlMod::Callable => semantic_tokens::CALLABLE, |
501 | HlMod::Static => lsp_types::SemanticTokenModifier::STATIC, | 502 | HlMod::Static => lsp_types::SemanticTokenModifier::STATIC, |
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index c0bc59918..747f0b9eb 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml | |||
@@ -13,7 +13,7 @@ doctest = false | |||
13 | [dependencies] | 13 | [dependencies] |
14 | cov-mark = { version = "1.1", features = ["thread-local"] } | 14 | cov-mark = { version = "1.1", features = ["thread-local"] } |
15 | itertools = "0.10.0" | 15 | itertools = "0.10.0" |
16 | rowan = "=0.13.0-pre.5" | 16 | rowan = "=0.13.0-pre.6" |
17 | rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" } | 17 | rustc_lexer = { version = "716.0.0", package = "rustc-ap-rustc_lexer" } |
18 | rustc-hash = "1.1.0" | 18 | rustc-hash = "1.1.0" |
19 | arrayvec = "0.7" | 19 | arrayvec = "0.7" |
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs index 3f9b84ab9..241713c48 100644 --- a/crates/syntax/src/algo.rs +++ b/crates/syntax/src/algo.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use std::{fmt, hash::BuildHasherDefault, ops::RangeInclusive}; | 3 | use std::{hash::BuildHasherDefault, ops::RangeInclusive}; |
4 | 4 | ||
5 | use indexmap::IndexMap; | 5 | use indexmap::IndexMap; |
6 | use itertools::Itertools; | 6 | use itertools::Itertools; |
@@ -330,137 +330,6 @@ fn _replace_children( | |||
330 | with_children(parent, new_children) | 330 | with_children(parent, new_children) |
331 | } | 331 | } |
332 | 332 | ||
333 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
334 | enum InsertPos { | ||
335 | FirstChildOf(SyntaxNode), | ||
336 | After(SyntaxElement), | ||
337 | } | ||
338 | |||
339 | #[derive(Default)] | ||
340 | pub struct SyntaxRewriter<'a> { | ||
341 | //FIXME: add debug_assertions that all elements are in fact from the same file. | ||
342 | replacements: FxHashMap<SyntaxElement, Replacement>, | ||
343 | insertions: IndexMap<InsertPos, Vec<SyntaxElement>>, | ||
344 | _pd: std::marker::PhantomData<&'a ()>, | ||
345 | } | ||
346 | |||
347 | impl fmt::Debug for SyntaxRewriter<'_> { | ||
348 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
349 | f.debug_struct("SyntaxRewriter") | ||
350 | .field("replacements", &self.replacements) | ||
351 | .field("insertions", &self.insertions) | ||
352 | .finish() | ||
353 | } | ||
354 | } | ||
355 | |||
356 | impl SyntaxRewriter<'_> { | ||
357 | pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) { | ||
358 | let what = what.clone().into(); | ||
359 | let replacement = Replacement::Single(with.clone().into()); | ||
360 | self.replacements.insert(what, replacement); | ||
361 | } | ||
362 | |||
363 | pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { | ||
364 | let _p = profile::span("rewrite"); | ||
365 | |||
366 | if self.replacements.is_empty() && self.insertions.is_empty() { | ||
367 | return node.clone(); | ||
368 | } | ||
369 | let green = self.rewrite_children(node); | ||
370 | with_green(node, green) | ||
371 | } | ||
372 | |||
373 | pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N { | ||
374 | N::cast(self.rewrite(node.syntax())).unwrap() | ||
375 | } | ||
376 | |||
377 | /// Returns a node that encompasses all replacements to be done by this rewriter. | ||
378 | /// | ||
379 | /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`. | ||
380 | /// | ||
381 | /// Returns `None` when there are no replacements. | ||
382 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { | ||
383 | let _p = profile::span("rewrite_root"); | ||
384 | fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> { | ||
385 | match element { | ||
386 | SyntaxElement::Node(it) => Some(it.clone()), | ||
387 | SyntaxElement::Token(it) => it.parent(), | ||
388 | } | ||
389 | } | ||
390 | |||
391 | self.replacements | ||
392 | .keys() | ||
393 | .filter_map(element_to_node_or_parent) | ||
394 | .chain(self.insertions.keys().filter_map(|pos| match pos { | ||
395 | InsertPos::FirstChildOf(it) => Some(it.clone()), | ||
396 | InsertPos::After(it) => element_to_node_or_parent(it), | ||
397 | })) | ||
398 | // If we only have one replacement/insertion, we must return its parent node, since `rewrite` does | ||
399 | // not replace the node passed to it. | ||
400 | .map(|it| it.parent().unwrap_or(it)) | ||
401 | .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) | ||
402 | } | ||
403 | |||
404 | fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> { | ||
405 | self.replacements.get(element).cloned() | ||
406 | } | ||
407 | |||
408 | fn insertions(&self, pos: &InsertPos) -> Option<impl Iterator<Item = SyntaxElement> + '_> { | ||
409 | self.insertions.get(pos).map(|insertions| insertions.iter().cloned()) | ||
410 | } | ||
411 | |||
412 | fn rewrite_children(&self, node: &SyntaxNode) -> rowan::GreenNode { | ||
413 | let _p = profile::span("rewrite_children"); | ||
414 | |||
415 | // FIXME: this could be made much faster. | ||
416 | let mut new_children = Vec::new(); | ||
417 | if let Some(elements) = self.insertions(&InsertPos::FirstChildOf(node.clone())) { | ||
418 | new_children.extend(elements.map(element_to_green)); | ||
419 | } | ||
420 | for child in node.children_with_tokens() { | ||
421 | self.rewrite_self(&mut new_children, &child); | ||
422 | } | ||
423 | |||
424 | rowan::GreenNode::new(rowan::SyntaxKind(node.kind() as u16), new_children) | ||
425 | } | ||
426 | |||
427 | fn rewrite_self( | ||
428 | &self, | ||
429 | acc: &mut Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | ||
430 | element: &SyntaxElement, | ||
431 | ) { | ||
432 | let _p = profile::span("rewrite_self"); | ||
433 | |||
434 | if let Some(replacement) = self.replacement(&element) { | ||
435 | match replacement { | ||
436 | Replacement::Single(element) => acc.push(element_to_green(element)), | ||
437 | }; | ||
438 | } else { | ||
439 | match element { | ||
440 | NodeOrToken::Token(it) => acc.push(NodeOrToken::Token(it.green().to_owned())), | ||
441 | NodeOrToken::Node(it) => { | ||
442 | acc.push(NodeOrToken::Node(self.rewrite_children(it))); | ||
443 | } | ||
444 | } | ||
445 | } | ||
446 | if let Some(elements) = self.insertions(&InsertPos::After(element.clone())) { | ||
447 | acc.extend(elements.map(element_to_green)); | ||
448 | } | ||
449 | } | ||
450 | } | ||
451 | |||
452 | fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> { | ||
453 | match element { | ||
454 | NodeOrToken::Node(it) => NodeOrToken::Node(it.green().into_owned()), | ||
455 | NodeOrToken::Token(it) => NodeOrToken::Token(it.green().to_owned()), | ||
456 | } | ||
457 | } | ||
458 | |||
459 | #[derive(Clone, Debug)] | ||
460 | enum Replacement { | ||
461 | Single(SyntaxElement), | ||
462 | } | ||
463 | |||
464 | fn with_children( | 333 | fn with_children( |
465 | parent: &SyntaxNode, | 334 | parent: &SyntaxNode, |
466 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, | 335 | new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>, |
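
With this hunk, `SyntaxRewriter` is removed wholesale; the editing style it supported (queue replacements, then `rewrite`) is covered by `ted` operating on trees cloned for update. A small sketch of the replacement pattern, using only APIs that appear elsewhere in this diff (the example strings are made up):

```rust
use syntax::{ast, ted, AstNode, SourceFile};

fn main() {
    // Make a mutable copy of the tree, then edit it in place.
    let file = SourceFile::parse("use foo::bar;").tree().clone_for_update();
    let old = file.syntax().descendants().find_map(ast::Path::cast).unwrap();

    // Build the replacement node and detach it into its own mutable tree.
    let donor = SourceFile::parse("use foo::baz;").tree();
    let new = donor
        .syntax()
        .descendants()
        .find_map(ast::Path::cast)
        .unwrap()
        .clone_subtree()
        .clone_for_update();

    ted::replace(old.syntax(), new.syntax());
    assert_eq!(file.syntax().to_string(), "use foo::baz;");
}
```
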
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs index 7f472d4db..a8071b51d 100644 --- a/crates/syntax/src/ast.rs +++ b/crates/syntax/src/ast.rs | |||
@@ -47,6 +47,12 @@ pub trait AstNode { | |||
47 | { | 47 | { |
48 | Self::cast(self.syntax().clone_for_update()).unwrap() | 48 | Self::cast(self.syntax().clone_for_update()).unwrap() |
49 | } | 49 | } |
50 | fn clone_subtree(&self) -> Self | ||
51 | where | ||
52 | Self: Sized, | ||
53 | { | ||
54 | Self::cast(self.syntax().clone_subtree()).unwrap() | ||
55 | } | ||
50 | } | 56 | } |
51 | 57 | ||
52 | /// Like `AstNode`, but wraps tokens rather than interior nodes. | 58 | /// Like `AstNode`, but wraps tokens rather than interior nodes. |
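
The new `clone_subtree` on `AstNode` is the AST-level wrapper over `SyntaxNode::clone_subtree`: it detaches a node into its own fresh tree, which is what `ast_from_text` and `try_merge_imports` now rely on. A small illustration (the input text is made up):

```rust
use syntax::{ast, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("mod m { fn f() {} }").tree();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();

    // Inside the original file the function starts at some non-zero offset...
    assert!(func.syntax().text_range().start() > 0.into());

    // ...but the detached copy is the root of its own tree.
    let detached = func.clone_subtree();
    assert_eq!(detached.syntax().text_range().start(), 0.into());
    assert!(detached.syntax().parent().is_none());
}
```
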
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index cbc75f922..4b5f5c571 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs | |||
@@ -2,20 +2,20 @@ | |||
2 | //! immutable, all function here return a fresh copy of the tree, instead of | 2 | //! immutable, all function here return a fresh copy of the tree, instead of |
3 | //! doing an in-place modification. | 3 | //! doing an in-place modification. |
4 | use std::{ | 4 | use std::{ |
5 | array, fmt, iter, | 5 | fmt, iter, |
6 | ops::{self, RangeInclusive}, | 6 | ops::{self, RangeInclusive}, |
7 | }; | 7 | }; |
8 | 8 | ||
9 | use arrayvec::ArrayVec; | 9 | use arrayvec::ArrayVec; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | algo::{self, SyntaxRewriter}, | 12 | algo, |
13 | ast::{ | 13 | ast::{ |
14 | self, | 14 | self, |
15 | make::{self, tokens}, | 15 | make::{self, tokens}, |
16 | AstNode, GenericParamsOwner, NameOwner, TypeBoundsOwner, | 16 | AstNode, |
17 | }, | 17 | }, |
18 | AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind, | 18 | ted, AstToken, Direction, InsertPosition, NodeOrToken, SmolStr, SyntaxElement, SyntaxKind, |
19 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, | 19 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, |
20 | SyntaxNode, SyntaxToken, T, | 20 | SyntaxNode, SyntaxToken, T, |
21 | }; | 21 | }; |
@@ -29,37 +29,6 @@ impl ast::BinExpr { | |||
29 | } | 29 | } |
30 | } | 30 | } |
31 | 31 | ||
32 | impl ast::Fn { | ||
33 | #[must_use] | ||
34 | pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn { | ||
35 | let mut to_insert: ArrayVec<SyntaxElement, 2> = ArrayVec::new(); | ||
36 | let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { | ||
37 | old_body.syntax().clone().into() | ||
38 | } else if let Some(semi) = self.semicolon_token() { | ||
39 | to_insert.push(make::tokens::single_space().into()); | ||
40 | semi.into() | ||
41 | } else { | ||
42 | to_insert.push(make::tokens::single_space().into()); | ||
43 | to_insert.push(body.syntax().clone().into()); | ||
44 | return self.insert_children(InsertPosition::Last, to_insert); | ||
45 | }; | ||
46 | to_insert.push(body.syntax().clone().into()); | ||
47 | self.replace_children(single_node(old_body_or_semi), to_insert) | ||
48 | } | ||
49 | |||
50 | #[must_use] | ||
51 | pub fn with_generic_param_list(&self, generic_args: ast::GenericParamList) -> ast::Fn { | ||
52 | if let Some(old) = self.generic_param_list() { | ||
53 | return self.replace_descendant(old, generic_args); | ||
54 | } | ||
55 | |||
56 | let anchor = self.name().expect("The function must have a name").syntax().clone(); | ||
57 | |||
58 | let to_insert = [generic_args.syntax().clone().into()]; | ||
59 | self.insert_children(InsertPosition::After(anchor.into()), array::IntoIter::new(to_insert)) | ||
60 | } | ||
61 | } | ||
62 | |||
63 | fn make_multiline<N>(node: N) -> N | 32 | fn make_multiline<N>(node: N) -> N |
64 | where | 33 | where |
65 | N: AstNode + Clone, | 34 | N: AstNode + Clone, |
@@ -92,81 +61,6 @@ where | |||
92 | } | 61 | } |
93 | } | 62 | } |
94 | 63 | ||
95 | impl ast::Impl { | ||
96 | #[must_use] | ||
97 | pub fn with_assoc_item_list(&self, items: ast::AssocItemList) -> ast::Impl { | ||
98 | let mut to_insert: ArrayVec<SyntaxElement, 2> = ArrayVec::new(); | ||
99 | if let Some(old_items) = self.assoc_item_list() { | ||
100 | let to_replace: SyntaxElement = old_items.syntax().clone().into(); | ||
101 | to_insert.push(items.syntax().clone().into()); | ||
102 | self.replace_children(single_node(to_replace), to_insert) | ||
103 | } else { | ||
104 | to_insert.push(make::tokens::single_space().into()); | ||
105 | to_insert.push(items.syntax().clone().into()); | ||
106 | self.insert_children(InsertPosition::Last, to_insert) | ||
107 | } | ||
108 | } | ||
109 | } | ||
110 | |||
111 | impl ast::AssocItemList { | ||
112 | #[must_use] | ||
113 | pub fn append_items( | ||
114 | &self, | ||
115 | items: impl IntoIterator<Item = ast::AssocItem>, | ||
116 | ) -> ast::AssocItemList { | ||
117 | let mut res = self.clone(); | ||
118 | if !self.syntax().text().contains_char('\n') { | ||
119 | res = make_multiline(res); | ||
120 | } | ||
121 | items.into_iter().for_each(|it| res = res.append_item(it)); | ||
122 | res.fixup_trailing_whitespace().unwrap_or(res) | ||
123 | } | ||
124 | |||
125 | #[must_use] | ||
126 | pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { | ||
127 | let (indent, position, whitespace) = match self.assoc_items().last() { | ||
128 | Some(it) => ( | ||
129 | leading_indent(it.syntax()).unwrap_or_default().to_string(), | ||
130 | InsertPosition::After(it.syntax().clone().into()), | ||
131 | "\n\n", | ||
132 | ), | ||
133 | None => match self.l_curly_token() { | ||
134 | Some(it) => ( | ||
135 | " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), | ||
136 | InsertPosition::After(it.into()), | ||
137 | "\n", | ||
138 | ), | ||
139 | None => return self.clone(), | ||
140 | }, | ||
141 | }; | ||
142 | let ws = tokens::WsBuilder::new(&format!("{}{}", whitespace, indent)); | ||
143 | let to_insert: ArrayVec<SyntaxElement, 2> = | ||
144 | [ws.ws().into(), item.syntax().clone().into()].into(); | ||
145 | self.insert_children(position, to_insert) | ||
146 | } | ||
147 | |||
148 | /// Remove extra whitespace between last item and closing curly brace. | ||
149 | fn fixup_trailing_whitespace(&self) -> Option<ast::AssocItemList> { | ||
150 | let first_token_after_items = | ||
151 | self.assoc_items().last()?.syntax().next_sibling_or_token()?; | ||
152 | let last_token_before_curly = self.r_curly_token()?.prev_sibling_or_token()?; | ||
153 | if last_token_before_curly != first_token_after_items { | ||
154 | // there is something more between last item and | ||
155 | // right curly than just whitespace - bail out | ||
156 | return None; | ||
157 | } | ||
158 | let whitespace = | ||
159 | last_token_before_curly.clone().into_token().and_then(ast::Whitespace::cast)?; | ||
160 | let text = whitespace.syntax().text(); | ||
161 | let newline = text.rfind('\n')?; | ||
162 | let keep = tokens::WsBuilder::new(&text[newline..]); | ||
163 | Some(self.replace_children( | ||
164 | first_token_after_items..=last_token_before_curly, | ||
165 | std::iter::once(keep.ws().into()), | ||
166 | )) | ||
167 | } | ||
168 | } | ||
169 | |||
170 | impl ast::RecordExprFieldList { | 64 | impl ast::RecordExprFieldList { |
171 | #[must_use] | 65 | #[must_use] |
172 | pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList { | 66 | pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList { |
@@ -243,36 +137,6 @@ impl ast::RecordExprFieldList { | |||
243 | } | 137 | } |
244 | } | 138 | } |
245 | 139 | ||
246 | impl ast::TypeAlias { | ||
247 | #[must_use] | ||
248 | pub fn remove_bounds(&self) -> ast::TypeAlias { | ||
249 | let colon = match self.colon_token() { | ||
250 | Some(it) => it, | ||
251 | None => return self.clone(), | ||
252 | }; | ||
253 | let end = match self.type_bound_list() { | ||
254 | Some(it) => it.syntax().clone().into(), | ||
255 | None => colon.clone().into(), | ||
256 | }; | ||
257 | self.replace_children(colon.into()..=end, iter::empty()) | ||
258 | } | ||
259 | } | ||
260 | |||
261 | impl ast::TypeParam { | ||
262 | #[must_use] | ||
263 | pub fn remove_bounds(&self) -> ast::TypeParam { | ||
264 | let colon = match self.colon_token() { | ||
265 | Some(it) => it, | ||
266 | None => return self.clone(), | ||
267 | }; | ||
268 | let end = match self.type_bound_list() { | ||
269 | Some(it) => it.syntax().clone().into(), | ||
270 | None => colon.clone().into(), | ||
271 | }; | ||
272 | self.replace_children(colon.into()..=end, iter::empty()) | ||
273 | } | ||
274 | } | ||
275 | |||
276 | impl ast::Path { | 140 | impl ast::Path { |
277 | #[must_use] | 141 | #[must_use] |
278 | pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { | 142 | pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { |
@@ -313,33 +177,7 @@ impl ast::PathSegment { | |||
313 | } | 177 | } |
314 | } | 178 | } |
315 | 179 | ||
316 | impl ast::Use { | ||
317 | #[must_use] | ||
318 | pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use { | ||
319 | if let Some(old) = self.use_tree() { | ||
320 | return self.replace_descendant(old, use_tree); | ||
321 | } | ||
322 | self.clone() | ||
323 | } | ||
324 | } | ||
325 | |||
326 | impl ast::UseTree { | 180 | impl ast::UseTree { |
327 | #[must_use] | ||
328 | pub fn with_path(&self, path: ast::Path) -> ast::UseTree { | ||
329 | if let Some(old) = self.path() { | ||
330 | return self.replace_descendant(old, path); | ||
331 | } | ||
332 | self.clone() | ||
333 | } | ||
334 | |||
335 | #[must_use] | ||
336 | pub fn with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree { | ||
337 | if let Some(old) = self.use_tree_list() { | ||
338 | return self.replace_descendant(old, use_tree_list); | ||
339 | } | ||
340 | self.clone() | ||
341 | } | ||
342 | |||
343 | /// Splits off the given prefix, making it the path component of the use tree, appending the rest of the path to all UseTreeList items. | 181 | /// Splits off the given prefix, making it the path component of the use tree, appending the rest of the path to all UseTreeList items. |
344 | #[must_use] | 182 | #[must_use] |
345 | pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { | 183 | pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { |
@@ -449,61 +287,6 @@ impl ast::MatchArmList { | |||
449 | } | 287 | } |
450 | } | 288 | } |
451 | 289 | ||
452 | impl ast::GenericParamList { | ||
453 | #[must_use] | ||
454 | pub fn append_params( | ||
455 | &self, | ||
456 | params: impl IntoIterator<Item = ast::GenericParam>, | ||
457 | ) -> ast::GenericParamList { | ||
458 | let mut res = self.clone(); | ||
459 | params.into_iter().for_each(|it| res = res.append_param(it)); | ||
460 | res | ||
461 | } | ||
462 | |||
463 | #[must_use] | ||
464 | pub fn append_param(&self, item: ast::GenericParam) -> ast::GenericParamList { | ||
465 | let space = tokens::single_space(); | ||
466 | |||
467 | let mut to_insert: ArrayVec<SyntaxElement, 4> = ArrayVec::new(); | ||
468 | if self.generic_params().next().is_some() { | ||
469 | to_insert.push(space.into()); | ||
470 | } | ||
471 | to_insert.push(item.syntax().clone().into()); | ||
472 | |||
473 | macro_rules! after_l_angle { | ||
474 | () => {{ | ||
475 | let anchor = match self.l_angle_token() { | ||
476 | Some(it) => it.into(), | ||
477 | None => return self.clone(), | ||
478 | }; | ||
479 | InsertPosition::After(anchor) | ||
480 | }}; | ||
481 | } | ||
482 | |||
483 | macro_rules! after_field { | ||
484 | ($anchor:expr) => { | ||
485 | if let Some(comma) = $anchor | ||
486 | .syntax() | ||
487 | .siblings_with_tokens(Direction::Next) | ||
488 | .find(|it| it.kind() == T![,]) | ||
489 | { | ||
490 | InsertPosition::After(comma) | ||
491 | } else { | ||
492 | to_insert.insert(0, make::token(T![,]).into()); | ||
493 | InsertPosition::After($anchor.syntax().clone().into()) | ||
494 | } | ||
495 | }; | ||
496 | } | ||
497 | |||
498 | let position = match self.generic_params().last() { | ||
499 | Some(it) => after_field!(it), | ||
500 | None => after_l_angle!(), | ||
501 | }; | ||
502 | |||
503 | self.insert_children(position, to_insert) | ||
504 | } | ||
505 | } | ||
506 | |||
507 | #[must_use] | 290 | #[must_use] |
508 | pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { | 291 | pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N { |
509 | N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() | 292 | N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() |
@@ -554,6 +337,12 @@ impl ops::Add<u8> for IndentLevel { | |||
554 | } | 337 | } |
555 | 338 | ||
556 | impl IndentLevel { | 339 | impl IndentLevel { |
340 | pub fn single() -> IndentLevel { | ||
341 | IndentLevel(0) | ||
342 | } | ||
343 | pub fn is_zero(&self) -> bool { | ||
344 | self.0 == 0 | ||
345 | } | ||
557 | pub fn from_element(element: &SyntaxElement) -> IndentLevel { | 346 | pub fn from_element(element: &SyntaxElement) -> IndentLevel { |
558 | match element { | 347 | match element { |
559 | rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it), | 348 | rowan::NodeOrToken::Node(it) => IndentLevel::from_node(it), |
@@ -588,37 +377,39 @@ impl IndentLevel { | |||
588 | /// ``` | 377 | /// ``` |
589 | /// if you indent the block, the `{` token would stay put. | 378 | /// if you indent the block, the `{` token would stay put. |
590 | fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { | 379 | fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { |
591 | let mut rewriter = SyntaxRewriter::default(); | 380 | let res = node.clone_subtree().clone_for_update(); |
592 | node.descendants_with_tokens() | 381 | let tokens = res.preorder_with_tokens().filter_map(|event| match event { |
593 | .filter_map(|el| el.into_token()) | 382 | rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), |
594 | .filter_map(ast::Whitespace::cast) | 383 | _ => None, |
595 | .filter(|ws| { | 384 | }); |
596 | let text = ws.syntax().text(); | 385 | for token in tokens { |
597 | text.contains('\n') | 386 | if let Some(ws) = ast::Whitespace::cast(token) { |
598 | }) | 387 | if ws.text().contains('\n') { |
599 | .for_each(|ws| { | 388 | let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self)); |
600 | let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,)); | 389 | ted::replace(ws.syntax(), &new_ws) |
601 | rewriter.replace(ws.syntax(), &new_ws) | 390 | } |
602 | }); | 391 | } |
603 | rewriter.rewrite(&node) | 392 | } |
393 | res.clone_subtree() | ||
604 | } | 394 | } |
605 | 395 | ||
606 | fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { | 396 | fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { |
607 | let mut rewriter = SyntaxRewriter::default(); | 397 | let res = node.clone_subtree().clone_for_update(); |
608 | node.descendants_with_tokens() | 398 | let tokens = res.preorder_with_tokens().filter_map(|event| match event { |
609 | .filter_map(|el| el.into_token()) | 399 | rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), |
610 | .filter_map(ast::Whitespace::cast) | 400 | _ => None, |
611 | .filter(|ws| { | 401 | }); |
612 | let text = ws.syntax().text(); | 402 | for token in tokens { |
613 | text.contains('\n') | 403 | if let Some(ws) = ast::Whitespace::cast(token) { |
614 | }) | 404 | if ws.text().contains('\n') { |
615 | .for_each(|ws| { | 405 | let new_ws = make::tokens::whitespace( |
616 | let new_ws = make::tokens::whitespace( | 406 | &ws.syntax().text().replace(&format!("\n{}", self), "\n"), |
617 | &ws.syntax().text().replace(&format!("\n{}", self), "\n"), | 407 | ); |
618 | ); | 408 | ted::replace(ws.syntax(), &new_ws) |
619 | rewriter.replace(ws.syntax(), &new_ws) | 409 | } |
620 | }); | 410 | } |
621 | rewriter.rewrite(&node) | 411 | } |
412 | res.clone_subtree() | ||
622 | } | 413 | } |
623 | } | 414 | } |
624 | 415 | ||
@@ -662,13 +453,6 @@ pub trait AstNodeEdit: AstNode + Clone + Sized { | |||
662 | let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); | 453 | let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); |
663 | Self::cast(new_syntax).unwrap() | 454 | Self::cast(new_syntax).unwrap() |
664 | } | 455 | } |
665 | |||
666 | #[must_use] | ||
667 | fn replace_descendant<D: AstNode>(&self, old: D, new: D) -> Self { | ||
668 | let mut rewriter = SyntaxRewriter::default(); | ||
669 | rewriter.replace(old.syntax(), new.syntax()); | ||
670 | rewriter.rewrite_ast(self) | ||
671 | } | ||
672 | fn indent_level(&self) -> IndentLevel { | 456 | fn indent_level(&self) -> IndentLevel { |
673 | IndentLevel::from_node(self.syntax()) | 457 | IndentLevel::from_node(self.syntax()) |
674 | } | 458 | } |
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 168355555..ca777d057 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs | |||
@@ -2,11 +2,16 @@ | |||
2 | 2 | ||
3 | use std::iter::empty; | 3 | use std::iter::empty; |
4 | 4 | ||
5 | use parser::T; | 5 | use parser::{SyntaxKind, T}; |
6 | use rowan::SyntaxElement; | ||
6 | 7 | ||
7 | use crate::{ | 8 | use crate::{ |
8 | algo::neighbor, | 9 | algo::neighbor, |
9 | ast::{self, edit::AstNodeEdit, make, GenericParamsOwner, WhereClause}, | 10 | ast::{ |
11 | self, | ||
12 | edit::{AstNodeEdit, IndentLevel}, | ||
13 | make, GenericParamsOwner, | ||
14 | }, | ||
10 | ted::{self, Position}, | 15 | ted::{self, Position}, |
11 | AstNode, AstToken, Direction, | 16 | AstNode, AstToken, Direction, |
12 | }; | 17 | }; |
@@ -37,7 +42,7 @@ impl GenericParamsOwnerEdit for ast::Fn { | |||
37 | } | 42 | } |
38 | } | 43 | } |
39 | 44 | ||
40 | fn get_or_create_where_clause(&self) -> WhereClause { | 45 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
41 | if self.where_clause().is_none() { | 46 | if self.where_clause().is_none() { |
42 | let position = if let Some(ty) = self.ret_type() { | 47 | let position = if let Some(ty) = self.ret_type() { |
43 | Position::after(ty.syntax()) | 48 | Position::after(ty.syntax()) |
@@ -67,7 +72,7 @@ impl GenericParamsOwnerEdit for ast::Impl { | |||
67 | } | 72 | } |
68 | } | 73 | } |
69 | 74 | ||
70 | fn get_or_create_where_clause(&self) -> WhereClause { | 75 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
71 | if self.where_clause().is_none() { | 76 | if self.where_clause().is_none() { |
72 | let position = if let Some(items) = self.assoc_item_list() { | 77 | let position = if let Some(items) = self.assoc_item_list() { |
73 | Position::before(items.syntax()) | 78 | Position::before(items.syntax()) |
@@ -97,7 +102,7 @@ impl GenericParamsOwnerEdit for ast::Trait { | |||
97 | } | 102 | } |
98 | } | 103 | } |
99 | 104 | ||
100 | fn get_or_create_where_clause(&self) -> WhereClause { | 105 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
101 | if self.where_clause().is_none() { | 106 | if self.where_clause().is_none() { |
102 | let position = if let Some(items) = self.assoc_item_list() { | 107 | let position = if let Some(items) = self.assoc_item_list() { |
103 | Position::before(items.syntax()) | 108 | Position::before(items.syntax()) |
@@ -127,7 +132,7 @@ impl GenericParamsOwnerEdit for ast::Struct { | |||
127 | } | 132 | } |
128 | } | 133 | } |
129 | 134 | ||
130 | fn get_or_create_where_clause(&self) -> WhereClause { | 135 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
131 | if self.where_clause().is_none() { | 136 | if self.where_clause().is_none() { |
132 | let tfl = self.field_list().and_then(|fl| match fl { | 137 | let tfl = self.field_list().and_then(|fl| match fl { |
133 | ast::FieldList::RecordFieldList(_) => None, | 138 | ast::FieldList::RecordFieldList(_) => None, |
@@ -165,7 +170,7 @@ impl GenericParamsOwnerEdit for ast::Enum { | |||
165 | } | 170 | } |
166 | } | 171 | } |
167 | 172 | ||
168 | fn get_or_create_where_clause(&self) -> WhereClause { | 173 | fn get_or_create_where_clause(&self) -> ast::WhereClause { |
169 | if self.where_clause().is_none() { | 174 | if self.where_clause().is_none() { |
170 | let position = if let Some(gpl) = self.generic_param_list() { | 175 | let position = if let Some(gpl) = self.generic_param_list() { |
171 | Position::after(gpl.syntax()) | 176 | Position::after(gpl.syntax()) |
@@ -272,6 +277,75 @@ impl ast::Use { | |||
272 | } | 277 | } |
273 | } | 278 | } |
274 | 279 | ||
280 | impl ast::Impl { | ||
281 | pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList { | ||
282 | if self.assoc_item_list().is_none() { | ||
283 | let assoc_item_list = make::assoc_item_list().clone_for_update(); | ||
284 | ted::append_child(self.syntax(), assoc_item_list.syntax()); | ||
285 | } | ||
286 | self.assoc_item_list().unwrap() | ||
287 | } | ||
288 | } | ||
289 | |||
290 | impl ast::AssocItemList { | ||
291 | pub fn add_item(&self, item: ast::AssocItem) { | ||
292 | let (indent, position, whitespace) = match self.assoc_items().last() { | ||
293 | Some(last_item) => ( | ||
294 | IndentLevel::from_node(last_item.syntax()), | ||
295 | Position::after(last_item.syntax()), | ||
296 | "\n\n", | ||
297 | ), | ||
298 | None => match self.l_curly_token() { | ||
299 | Some(l_curly) => { | ||
300 | self.normalize_ws_between_braces(); | ||
301 | (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") | ||
302 | } | ||
303 | None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"), | ||
304 | }, | ||
305 | }; | ||
306 | let elements: Vec<SyntaxElement<_>> = vec![ | ||
307 | make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(), | ||
308 | item.syntax().clone().into(), | ||
309 | ]; | ||
310 | ted::insert_all(position, elements); | ||
311 | } | ||
312 | |||
313 | fn normalize_ws_between_braces(&self) -> Option<()> { | ||
314 | let l = self.l_curly_token()?; | ||
315 | let r = self.r_curly_token()?; | ||
316 | let indent = IndentLevel::from_node(self.syntax()); | ||
317 | |||
318 | match l.next_sibling_or_token() { | ||
319 | Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => { | ||
320 | if ws.next_sibling_or_token()?.into_token()? == r { | ||
321 | ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent))); | ||
322 | } | ||
323 | } | ||
324 | Some(ws) if ws.kind() == T!['}'] => { | ||
325 | ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent))); | ||
326 | } | ||
327 | _ => (), | ||
328 | } | ||
329 | Some(()) | ||
330 | } | ||
331 | } | ||
332 | |||
333 | impl ast::Fn { | ||
334 | pub fn get_or_create_body(&self) -> ast::BlockExpr { | ||
335 | if self.body().is_none() { | ||
336 | let body = make::ext::empty_block_expr().clone_for_update(); | ||
337 | match self.semicolon_token() { | ||
338 | Some(semi) => { | ||
339 | ted::replace(semi, body.syntax()); | ||
339 | ted::insert(Position::before(body.syntax()), make::tokens::single_space()); | ||
341 | } | ||
342 | None => ted::append_child(self.syntax(), body.syntax()), | ||
343 | } | ||
344 | } | ||
345 | self.body().unwrap() | ||
346 | } | ||
347 | } | ||
348 | |||
275 | #[cfg(test)] | 349 | #[cfg(test)] |
276 | mod tests { | 350 | mod tests { |
277 | use std::fmt; | 351 | use std::fmt; |
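
`get_or_create_assoc_item_list`, `add_item`, and `get_or_create_body` are the in-place (`ted`-based) counterparts of the `with_assoc_item_list`/`append_item`/`with_body` helpers deleted from `edit.rs` above: instead of returning a rewritten copy, they mutate a tree that was cloned for update. A rough usage sketch (the trait text is an invented input):

```rust
use syntax::{ast, AstNode, SourceFile};

fn main() {
    // In-place editing requires a mutable tree, so clone the file for update.
    let file = SourceFile::parse("trait T { fn f(); }").tree().clone_for_update();
    let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();

    // Replaces the trailing `;` with a fresh empty block (or appends a block
    // when there is neither a body nor a semicolon).
    let _body = func.get_or_create_body();
    println!("{}", file.syntax()); // `fn f()` now ends in a block.
}
```
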
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 1998ad1f6..d13926ded 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs | |||
@@ -12,7 +12,7 @@ | |||
12 | use itertools::Itertools; | 12 | use itertools::Itertools; |
13 | use stdx::{format_to, never}; | 13 | use stdx::{format_to, never}; |
14 | 14 | ||
15 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; | 15 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken}; |
16 | 16 | ||
17 | /// While the parent module defines basic atomic "constructors", the `ext` | 17 | /// While the parent module defines basic atomic "constructors", the `ext` |
18 | /// module defines shortcuts for common things. | 18 | /// module defines shortcuts for common things. |
@@ -99,7 +99,7 @@ fn ty_from_text(text: &str) -> ast::Type { | |||
99 | } | 99 | } |
100 | 100 | ||
101 | pub fn assoc_item_list() -> ast::AssocItemList { | 101 | pub fn assoc_item_list() -> ast::AssocItemList { |
102 | ast_from_text("impl C for D {};") | 102 | ast_from_text("impl C for D {}") |
103 | } | 103 | } |
104 | 104 | ||
105 | pub fn impl_trait(trait_: ast::Path, ty: ast::Path) -> ast::Impl { | 105 | pub fn impl_trait(trait_: ast::Path, ty: ast::Path) -> ast::Impl { |
@@ -601,17 +601,11 @@ fn ast_from_text<N: AstNode>(text: &str) -> N { | |||
601 | panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text) | 601 | panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text) |
602 | } | 602 | } |
603 | }; | 603 | }; |
604 | let node = node.syntax().clone(); | 604 | let node = node.clone_subtree(); |
605 | let node = unroot(node); | ||
606 | let node = N::cast(node).unwrap(); | ||
607 | assert_eq!(node.syntax().text_range().start(), 0.into()); | 605 | assert_eq!(node.syntax().text_range().start(), 0.into()); |
608 | node | 606 | node |
609 | } | 607 | } |
610 | 608 | ||
611 | fn unroot(n: SyntaxNode) -> SyntaxNode { | ||
612 | SyntaxNode::new_root(n.green().into()) | ||
613 | } | ||
614 | |||
615 | pub fn token(kind: SyntaxKind) -> SyntaxToken { | 609 | pub fn token(kind: SyntaxKind) -> SyntaxToken { |
616 | tokens::SOURCE_FILE | 610 | tokens::SOURCE_FILE |
617 | .tree() | 611 | .tree() |
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs index 333bde54a..431ed0699 100644 --- a/crates/syntax/src/parsing.rs +++ b/crates/syntax/src/parsing.rs | |||
@@ -6,14 +6,13 @@ mod text_token_source; | |||
6 | mod text_tree_sink; | 6 | mod text_tree_sink; |
7 | mod reparsing; | 7 | mod reparsing; |
8 | 8 | ||
9 | use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; | 9 | use parser::SyntaxKind; |
10 | use text_token_source::TextTokenSource; | 10 | use text_token_source::TextTokenSource; |
11 | use text_tree_sink::TextTreeSink; | 11 | use text_tree_sink::TextTreeSink; |
12 | 12 | ||
13 | pub(crate) use lexer::*; | 13 | use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; |
14 | 14 | ||
15 | pub(crate) use self::reparsing::incremental_reparse; | 15 | pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse}; |
16 | use parser::SyntaxKind; | ||
17 | 16 | ||
18 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { | 17 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { |
19 | let (tokens, lexer_errors) = tokenize(&text); | 18 | let (tokens, lexer_errors) = tokenize(&text); |
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs index 1934204ea..d63ec080b 100644 --- a/crates/syntax/src/parsing/text_tree_sink.rs +++ b/crates/syntax/src/parsing/text_tree_sink.rs | |||
@@ -147,8 +147,8 @@ fn n_attached_trivias<'a>( | |||
147 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, | 147 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, |
148 | ) -> usize { | 148 | ) -> usize { |
149 | match kind { | 149 | match kind { |
150 | MACRO_CALL | MACRO_RULES | MACRO_DEF | CONST | TYPE_ALIAS | STRUCT | UNION | ENUM | 150 | CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD |
151 | | VARIANT | FN | TRAIT | MODULE | RECORD_FIELD | STATIC | USE => { | 151 | | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => { |
152 | let mut res = 0; | 152 | let mut res = 0; |
153 | let mut trivias = trivias.enumerate().peekable(); | 153 | let mut trivias = trivias.enumerate().peekable(); |
154 | 154 | ||
diff --git a/crates/syntax/test_data/parser/ok/0045_block_attrs.rast b/crates/syntax/test_data/parser/ok/0045_block_attrs.rast index 50ab52d32..5e50b4e0b 100644 --- a/crates/syntax/test_data/parser/ok/0045_block_attrs.rast +++ b/crates/syntax/test_data/parser/ok/0045_block_attrs.rast | |||
@@ -127,9 +127,9 @@ [email protected] | |||
127 | WHITESPACE@537..538 "\n" | 127 | WHITESPACE@537..538 "\n" |
128 | R_CURLY@538..539 "}" | 128 | R_CURLY@538..539 "}" |
129 | WHITESPACE@539..541 "\n\n" | 129 | WHITESPACE@539..541 "\n\n" |
130 | COMMENT@541..601 "// https://github.com ..." | 130 | IMPL@541..763 |
131 | WHITESPACE@601..602 "\n" | 131 | COMMENT@541..601 "// https://github.com ..." |
132 | IMPL@602..763 | 132 | WHITESPACE@601..602 "\n" |
133 | IMPL_KW@602..606 "impl" | 133 | IMPL_KW@602..606 "impl" |
134 | WHITESPACE@606..607 " " | 134 | WHITESPACE@606..607 " " |
135 | [email protected] | 135 | [email protected] |
diff --git a/docs/dev/style.md b/docs/dev/style.md index d24a5952e..f22b69768 100644 --- a/docs/dev/style.md +++ b/docs/dev/style.md | |||
@@ -636,6 +636,10 @@ use crate::{} | |||
636 | 636 | ||
637 | // Finally, parent and child modules, but prefer `use crate::`. | 637 | // Finally, parent and child modules, but prefer `use crate::`. |
638 | use super::{} | 638 | use super::{} |
639 | |||
640 | // Re-exports are treated as item definitions rather than imports, so they go | ||
641 | // after imports and modules. Use them sparingly. | ||
642 | pub use crate::x::Z; | ||
639 | ``` | 643 | ``` |
640 | 644 | ||
641 | **Rationale:** consistency. | 645 | **Rationale:** consistency. |
@@ -694,6 +698,9 @@ Avoid local `use MyEnum::*` imports. | |||
694 | Prefer `use crate::foo::bar` to `use super::bar` or `use self::bar::baz`. | 698 | Prefer `use crate::foo::bar` to `use super::bar` or `use self::bar::baz`. |
695 | **Rationale:** consistency, this is the style which works in all cases. | 699 | **Rationale:** consistency, this is the style which works in all cases. |
696 | 700 | ||
701 | By default, avoid re-exports. | ||
702 | **Rationale:** for non-library code, re-exports introduce two ways to use something and allow for inconsistency. | ||
703 | |||
697 | ## Order of Items | 704 | ## Order of Items |
698 | 705 | ||
699 | Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on. | 706 | Optimize for the reader who sees the file for the first time, and wants to get a general idea about what's going on. |
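
As a concrete picture of the "two ways to use something" problem the rationale mentions, a self-contained sketch (all module and type names here are hypothetical):

```rust
mod x {
    pub struct Z;
}
mod prelude {
    // A convenience re-export: there are now two paths to the same type.
    pub use crate::x::Z;
}

mod a {
    use crate::x::Z; // one file spells the defining path...
    pub fn f() -> Z {
        Z
    }
}
mod b {
    use crate::prelude::Z; // ...another goes through the re-export.
    pub fn g() -> Z {
        Z
    }
}

fn main() {}
```
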
@@ -944,6 +951,28 @@ match p.current() { | |||
944 | 951 | ||
945 | ## Documentation | 952 | ## Documentation |
946 | 953 | ||
954 | Style inline code comments as proper sentences. | ||
955 | Start with a capital letter, end with a dot. | ||
956 | |||
957 | ```rust | ||
958 | // GOOD | ||
959 | |||
960 | // Only simple single segment paths are allowed. | ||
961 | MergeBehavior::Last => { | ||
962 | tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1) | ||
963 | } | ||
964 | |||
965 | // BAD | ||
966 | |||
967 | // only simple single segment paths are allowed | ||
968 | MergeBehavior::Last => { | ||
969 | tree.use_tree_list().is_none() && tree.path().map(path_len) <= Some(1) | ||
970 | } | ||
971 | ``` | ||
972 | |||
973 | **Rationale:** writing a sentence (or maybe even a paragraph) rather than just "a comment" creates a more appropriate frame of mind. | ||
974 | It tricks you into writing down more of the context you keep in your head while coding. | ||
975 | |||
947 | For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines. | 976 | For `.md` and `.adoc` files, prefer a sentence-per-line format, don't wrap lines. |
948 | If the line is too long, you want to split the sentence in two :-) | 977 | If the line is too long, you want to split the sentence in two :-) |
949 | 978 | ||