Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs               |   6
-rw-r--r--  crates/ra_assists/src/ast_transform.rs            |  12
-rw-r--r--  crates/ra_assists/src/handlers/merge_imports.rs   | 103
-rw-r--r--  crates/ra_hir_expand/src/eager.rs                 |  17
-rw-r--r--  crates/ra_ide/src/expand_macro.rs                 |  12
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs                  | 176
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs                | 359
-rw-r--r--  crates/ra_mbe/src/tests.rs                        |  62
-rw-r--r--  crates/ra_syntax/src/algo.rs                      | 138
-rw-r--r--  crates/ra_syntax/src/ast/edit.rs                  | 103
-rw-r--r--  crates/rust-analyzer/src/config.rs                |   3
-rw-r--r--  crates/rust-analyzer/src/conv.rs                  |   1
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs             |   1
-rw-r--r--  crates/rust-analyzer/src/req.rs                   |   1
14 files changed, 697 insertions, 297 deletions
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 62182cf03..c3e653299 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -11,6 +11,7 @@ use ra_syntax::{
 use ra_text_edit::TextEditBuilder;
 
 use crate::{AssistAction, AssistId, AssistLabel, GroupLabel, ResolvedAssist};
+use algo::SyntaxRewriter;
 
 #[derive(Clone, Debug)]
 pub(crate) struct Assist(pub(crate) Vec<AssistInfo>);
@@ -234,6 +235,11 @@ impl ActionBuilder {
     pub(crate) fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
         algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
     }
+    pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) {
+        let node = rewriter.rewrite_root().unwrap();
+        let new = rewriter.rewrite(&node);
+        algo::diff(&node, &new).into_text_edit(&mut self.edit)
+    }
 
     fn build(self) -> AssistAction {
         AssistAction {
diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs
index 45558c448..52b4c82db 100644
--- a/crates/ra_assists/src/ast_transform.rs
+++ b/crates/ra_assists/src/ast_transform.rs
@@ -3,7 +3,10 @@ use rustc_hash::FxHashMap;
 
 use hir::{PathResolution, SemanticsScope};
 use ra_ide_db::RootDatabase;
-use ra_syntax::ast::{self, AstNode};
+use ra_syntax::{
+    algo::SyntaxRewriter,
+    ast::{self, AstNode},
+};
 
 pub trait AstTransform<'a> {
     fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;
@@ -153,15 +156,14 @@ impl<'a> QualifyPaths<'a> {
 }
 
 pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
-    let syntax = node.syntax();
-    let result = ra_syntax::algo::replace_descendants(syntax, |element| match element {
+    SyntaxRewriter::from_fn(|element| match element {
         ra_syntax::SyntaxElement::Node(n) => {
             let replacement = transformer.get_substitution(&n)?;
             Some(replacement.into())
         }
         _ => None,
-    });
-    N::cast(result).unwrap()
+    })
+    .rewrite_ast(&node)
 }
 
 impl<'a> AstTransform<'a> for QualifyPaths<'a> {
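
Note on the API used above: `SyntaxRewriter::from_fn` wraps a closure that is consulted for every
element of the tree, and `rewrite_ast` re-casts the rewritten root back into the typed AST node.
A minimal sketch of the same closure-driven pattern, using only calls that appear in this commit
(the `normalize_ws` helper itself is hypothetical):

    use ra_syntax::{
        algo::SyntaxRewriter,
        ast::{make, AstNode},
        SyntaxElement, SyntaxKind,
    };

    // Collapse every whitespace token to a single space.
    fn normalize_ws<N: AstNode>(node: &N) -> N {
        SyntaxRewriter::from_fn(|element| match element {
            SyntaxElement::Token(token) if token.kind() == SyntaxKind::WHITESPACE => {
                Some(make::tokens::whitespace(" ").into())
            }
            _ => None,
        })
        .rewrite_ast(node)
    }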
diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs
index 89bc975bd..9c57d1e30 100644
--- a/crates/ra_assists/src/handlers/merge_imports.rs
+++ b/crates/ra_assists/src/handlers/merge_imports.rs
@@ -1,9 +1,9 @@
 use std::iter::successors;
 
 use ra_syntax::{
-    algo::neighbor,
+    algo::{neighbor, SyntaxRewriter},
     ast::{self, edit::AstNodeEdit, make},
-    AstNode, AstToken, Direction, InsertPosition, SyntaxElement, TextRange, T,
+    AstNode, Direction, InsertPosition, SyntaxElement, T,
 };
 
 use crate::{Assist, AssistCtx, AssistId};
@@ -22,9 +22,10 @@ use crate::{Assist, AssistCtx, AssistId};
 // ```
 pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
     let tree: ast::UseTree = ctx.find_node_at_offset()?;
-    let (new_tree, to_delete) = if let Some(use_item) =
-        tree.syntax().parent().and_then(ast::UseItem::cast)
-    {
+    let mut rewriter = SyntaxRewriter::default();
+    let mut offset = ctx.frange.range.start();
+
+    if let Some(use_item) = tree.syntax().parent().and_then(ast::UseItem::cast) {
         let (merged, to_delete) = next_prev()
             .filter_map(|dir| neighbor(&use_item, dir))
             .filter_map(|it| Some((it.clone(), it.use_tree()?)))
@@ -32,42 +33,28 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
             .find_map(|(use_item, use_tree)| {
                 Some((try_merge_trees(&tree, &use_tree)?, use_item.clone()))
             })?;
 
-        let mut range = to_delete.syntax().text_range();
-        let next_ws = to_delete
-            .syntax()
-            .next_sibling_or_token()
-            .and_then(|it| it.into_token())
-            .and_then(ast::Whitespace::cast);
-        if let Some(ws) = next_ws {
-            range = range.extend_to(&ws.syntax().text_range())
+        rewriter.replace_ast(&tree, &merged);
+        rewriter += to_delete.remove();
+
+        if to_delete.syntax().text_range().end() < offset {
+            offset -= to_delete.syntax().text_range().len();
         }
-        (merged, range)
     } else {
         let (merged, to_delete) = next_prev()
             .filter_map(|dir| neighbor(&tree, dir))
             .find_map(|use_tree| Some((try_merge_trees(&tree, &use_tree)?, use_tree.clone())))?;
 
-        let mut range = to_delete.syntax().text_range();
-        if let Some((dir, nb)) = next_prev().find_map(|dir| Some((dir, neighbor(&to_delete, dir)?)))
-        {
-            let nb_range = nb.syntax().text_range();
-            if dir == Direction::Prev {
-                range = TextRange::from_to(nb_range.end(), range.end());
-            } else {
-                range = TextRange::from_to(range.start(), nb_range.start());
-            }
+        rewriter.replace_ast(&tree, &merged);
+        rewriter += to_delete.remove();
+
+        if to_delete.syntax().text_range().end() < offset {
+            offset -= to_delete.syntax().text_range().len();
         }
-        (merged, range)
     };
 
-    let mut offset = ctx.frange.range.start();
     ctx.add_assist(AssistId("merge_imports"), "Merge imports", |edit| {
-        edit.replace_ast(tree, new_tree);
-        edit.delete(to_delete);
-
-        if to_delete.end() <= offset {
-            offset -= to_delete.len();
-        }
+        edit.rewrite(rewriter);
+        // FIXME: we only need because our diff is imprecise
         edit.set_cursor(offset);
     })
 }
@@ -156,7 +143,7 @@ use std::fmt::Debug;
 use std::fmt<|>::Display;
 ",
             r"
-use std::fmt<|>::{Display, Debug};
+use std::fmt:<|>:{Display, Debug};
 ",
         );
     }
@@ -178,7 +165,57 @@ use std::{fmt<|>::{Debug, Display}};
 use std::{fmt::Debug, fmt<|>::Display};
 ",
             r"
-use std::{fmt<|>::{Display, Debug}};
+use std::{fmt::<|>{Display, Debug}};
+",
+        );
+    }
+
+    #[test]
+    fn removes_just_enough_whitespace() {
+        check_assist(
+            merge_imports,
+            r"
+use foo<|>::bar;
+use foo::baz;
+
+/// Doc comment
+",
+            r"
+use foo<|>::{bar, baz};
+
+/// Doc comment
+",
+        );
+    }
+
+    #[test]
+    fn works_with_trailing_comma() {
+        check_assist(
+            merge_imports,
+            r"
+use {
+    foo<|>::bar,
+    foo::baz,
+};
+",
+            r"
+use {
+    foo<|>::{bar, baz},
+};
+",
+        );
+        check_assist(
+            merge_imports,
+            r"
+use {
+    foo::baz,
+    foo<|>::bar,
+};
+",
+            r"
+use {
+    foo::{bar<|>, baz},
+};
+",
+        );
+    }
 ",
         );
     }
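
Worth noting about the rewritten assist: both branches funnel into a single `SyntaxRewriter`,
combining a typed replacement with the removal rewriter returned by `UseItem::remove` /
`UseTree::remove` (added in ra_syntax/src/ast/edit.rs below). A sketch of that composition,
assuming the merged tree and the now-redundant item are already in hand:

    use ra_syntax::{algo::SyntaxRewriter, ast};

    // `merged` replaces `tree`; the redundant item's removal (itself a
    // SyntaxRewriter) is folded in via `+=`, the ops::AddAssign impl
    // added in algo.rs.
    fn combined_rewrite(
        tree: &ast::UseTree,
        merged: &ast::UseTree,
        redundant: &ast::UseItem,
    ) -> SyntaxRewriter<'static> {
        let mut rewriter = SyntaxRewriter::default();
        rewriter.replace_ast(tree, merged);
        rewriter += redundant.remove();
        rewriter
    }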
diff --git a/crates/ra_hir_expand/src/eager.rs b/crates/ra_hir_expand/src/eager.rs
index 4cbce4df5..687d40294 100644
--- a/crates/ra_hir_expand/src/eager.rs
+++ b/crates/ra_hir_expand/src/eager.rs
@@ -26,8 +26,8 @@ use crate::{
 };
 
 use ra_parser::FragmentKind;
-use ra_syntax::{algo::replace_descendants, SyntaxElement, SyntaxNode};
-use std::{collections::HashMap, sync::Arc};
+use ra_syntax::{algo::SyntaxRewriter, SyntaxNode};
+use std::sync::Arc;
 
 pub fn expand_eager_macro(
     db: &dyn AstDatabase,
@@ -95,10 +95,10 @@ fn eager_macro_recur(
     curr: InFile<SyntaxNode>,
     macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
 ) -> Option<SyntaxNode> {
-    let mut original = curr.value.clone();
+    let original = curr.value.clone();
 
     let children = curr.value.descendants().filter_map(ast::MacroCall::cast);
-    let mut replaces: HashMap<SyntaxElement, SyntaxElement> = HashMap::default();
+    let mut rewriter = SyntaxRewriter::default();
 
     // Collect replacement
     for child in children {
@@ -119,12 +119,9 @@ fn eager_macro_recur(
             }
         };
 
-        replaces.insert(child.syntax().clone().into(), insert.into());
+        rewriter.replace(child.syntax(), &insert);
     }
 
-    if !replaces.is_empty() {
-        original = replace_descendants(&original, |n| replaces.get(n).cloned());
-    }
-
-    Some(original)
+    let res = rewriter.rewrite(&original);
+    Some(res)
 }
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs
index e58526f31..f536ba3e7 100644
--- a/crates/ra_ide/src/expand_macro.rs
+++ b/crates/ra_ide/src/expand_macro.rs
@@ -3,10 +3,9 @@
 use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
-    algo::{find_node_at_offset, replace_descendants},
-    ast, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, WalkEvent, T,
+    algo::{find_node_at_offset, SyntaxRewriter},
+    ast, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, WalkEvent, T,
 };
-use rustc_hash::FxHashMap;
 
 use crate::FilePosition;
 
@@ -37,7 +36,7 @@ fn expand_macro_recur(
     let mut expanded = sema.expand(macro_call)?;
 
     let children = expanded.descendants().filter_map(ast::MacroCall::cast);
-    let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default();
+    let mut rewriter = SyntaxRewriter::default();
 
     for child in children.into_iter() {
         if let Some(new_node) = expand_macro_recur(sema, &child) {
@@ -47,12 +46,13 @@ fn expand_macro_recur(
             if expanded == *child.syntax() {
                 expanded = new_node;
             } else {
-                replaces.insert(child.syntax().clone().into(), new_node.into());
+                rewriter.replace(child.syntax(), &new_node)
             }
         }
     }
 
-    Some(replace_descendants(&expanded, |n| replaces.get(n).cloned()))
+    let res = rewriter.rewrite(&expanded);
+    Some(res)
 }
 
 // FIXME: It would also be cool to share logic here and in the mbe tests,
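
This file and eager.rs above land on the same shape: collect replacements while walking the
tree, then apply them in one pass. The old `is_empty` guard in eager.rs also disappears because
`rewrite` on an empty rewriter simply clones the node. A standalone sketch of the pattern, with
`expand_one` as a hypothetical stand-in for the recursive expansion:

    use ra_syntax::{algo::SyntaxRewriter, ast, AstNode, SyntaxNode};

    // Collect-then-rewrite: gather all macro-call replacements first,
    // then rebuild the tree once instead of once per replacement.
    fn expand_all(
        root: &SyntaxNode,
        expand_one: impl Fn(&ast::MacroCall) -> Option<SyntaxNode>,
    ) -> SyntaxNode {
        let mut rewriter = SyntaxRewriter::default();
        for call in root.descendants().filter_map(ast::MacroCall::cast) {
            if let Some(expansion) = expand_one(&call) {
                rewriter.replace(call.syntax(), &expansion);
            }
        }
        rewriter.rewrite(root)
    }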
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index ecd615cf4..f4f0751c0 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -5,7 +5,7 @@ use ra_ide_db::RootDatabase;
 use ra_prof::profile;
 use ra_syntax::{
     ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner},
-    match_ast, SmolStr, TextRange,
+    match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, TextRange,
 };
 
 use crate::{FileId, FunctionSignature};
@@ -14,12 +14,13 @@ use crate::{FileId, FunctionSignature};
 pub struct InlayHintsOptions {
     pub type_hints: bool,
     pub parameter_hints: bool,
+    pub chaining_hints: bool,
     pub max_length: Option<usize>,
 }
 
 impl Default for InlayHintsOptions {
     fn default() -> Self {
-        Self { type_hints: true, parameter_hints: true, max_length: None }
+        Self { type_hints: true, parameter_hints: true, chaining_hints: true, max_length: None }
     }
 }
 
@@ -27,6 +28,7 @@ impl Default for InlayHintsOptions {
 pub enum InlayKind {
     TypeHint,
     ParameterHint,
+    ChainingHint,
 }
 
 #[derive(Debug)]
@@ -47,6 +49,10 @@ pub(crate) fn inlay_hints(
 
     let mut res = Vec::new();
     for node in file.syntax().descendants() {
+        if let Some(expr) = ast::Expr::cast(node.clone()) {
+            get_chaining_hints(&mut res, &sema, options, expr);
+        }
+
         match_ast! {
             match node {
                 ast::CallExpr(it) => { get_param_name_hints(&mut res, &sema, options, ast::Expr::from(it)); },
@@ -59,6 +65,46 @@ pub(crate) fn inlay_hints(
     res
 }
 
+fn get_chaining_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<RootDatabase>,
+    options: &InlayHintsOptions,
+    expr: ast::Expr,
+) -> Option<()> {
+    if !options.chaining_hints {
+        return None;
+    }
+
+    let ty = sema.type_of_expr(&expr)?;
+    if ty.is_unknown() {
+        return None;
+    }
+
+    let mut tokens = expr
+        .syntax()
+        .siblings_with_tokens(Direction::Next)
+        .filter_map(NodeOrToken::into_token)
+        .filter(|t| match t.kind() {
+            SyntaxKind::WHITESPACE if !t.text().contains('\n') => false,
+            SyntaxKind::COMMENT => false,
+            _ => true,
+        });
+
+    // Chaining can be defined as an expression whose next sibling tokens are newline and dot
+    // Ignoring extra whitespace and comments
+    let next = tokens.next()?.kind();
+    let next_next = tokens.next()?.kind();
+    if next == SyntaxKind::WHITESPACE && next_next == SyntaxKind::DOT {
+        let label = ty.display_truncated(sema.db, options.max_length).to_string();
+        acc.push(InlayHint {
+            range: expr.syntax().text_range(),
+            kind: InlayKind::ChainingHint,
+            label: label.into(),
+        });
+    }
+    Some(())
+}
+
 fn get_param_name_hints(
     acc: &mut Vec<InlayHint>,
     sema: &Semantics<RootDatabase>,
@@ -238,7 +284,7 @@ mod tests {
         let _x = foo(4, 4);
     }"#,
         );
-        assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ parameter_hints: true, type_hints: false, max_length: None}).unwrap(), @r###"
+        assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ parameter_hints: true, type_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###"
         [
             InlayHint {
                 range: [106; 107),
@@ -262,7 +308,7 @@ mod tests {
         let _x = foo(4, 4);
     }"#,
         );
-        assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ type_hints: false, parameter_hints: false, max_length: None}).unwrap(), @r###"[]"###);
+        assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ type_hints: false, parameter_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###"[]"###);
     }
 
     #[test]
@@ -274,7 +320,7 @@ mod tests {
         let _x = foo(4, 4);
     }"#,
         );
-        assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ type_hints: true, parameter_hints: false, max_length: None}).unwrap(), @r###"
+        assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ type_hints: true, parameter_hints: false, chaining_hints: false, max_length: None}).unwrap(), @r###"
         [
             InlayHint {
                 range: [97; 99),
@@ -1052,4 +1098,124 @@ fn main() {
1052 "### 1098 "###
1053 ); 1099 );
1054 } 1100 }
1101
1102 #[test]
1103 fn chaining_hints_ignore_comments() {
1104 let (analysis, file_id) = single_file(
1105 r#"
1106 struct A(B);
1107 impl A { fn into_b(self) -> B { self.0 } }
1108 struct B(C);
1109 impl B { fn into_c(self) -> C { self.0 } }
1110 struct C;
1111
1112 fn main() {
1113 let c = A(B(C))
1114 .into_b() // This is a comment
1115 .into_c();
1116 }"#,
1117 );
1118 assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"
1119 [
1120 InlayHint {
1121 range: [232; 269),
1122 kind: ChainingHint,
1123 label: "B",
1124 },
1125 InlayHint {
1126 range: [232; 239),
1127 kind: ChainingHint,
1128 label: "A",
1129 },
1130 ]"###);
1131 }
1132
1133 #[test]
1134 fn chaining_hints_without_newlines() {
1135 let (analysis, file_id) = single_file(
1136 r#"
1137 struct A(B);
1138 impl A { fn into_b(self) -> B { self.0 } }
1139 struct B(C);
1140 impl B { fn into_c(self) -> C { self.0 } }
1141 struct C;
1142
1143 fn main() {
1144 let c = A(B(C)).into_b().into_c();
1145 }"#,
1146 );
1147 assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"[]"###);
1148 }
1149
1150 #[test]
1151 fn struct_access_chaining_hints() {
1152 let (analysis, file_id) = single_file(
1153 r#"
1154 struct A { pub b: B }
1155 struct B { pub c: C }
1156 struct C(pub bool);
1157
1158 fn main() {
1159 let x = A { b: B { c: C(true) } }
1160 .b
1161 .c
1162 .0;
1163 }"#,
1164 );
1165 assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"
1166 [
1167 InlayHint {
1168 range: [150; 221),
1169 kind: ChainingHint,
1170 label: "C",
1171 },
1172 InlayHint {
1173 range: [150; 198),
1174 kind: ChainingHint,
1175 label: "B",
1176 },
1177 InlayHint {
1178 range: [150; 175),
1179 kind: ChainingHint,
1180 label: "A",
1181 },
1182 ]"###);
1183 }
1184
1185 #[test]
1186 fn generic_chaining_hints() {
1187 let (analysis, file_id) = single_file(
1188 r#"
1189 struct A<T>(T);
1190 struct B<T>(T);
1191 struct C<T>(T);
1192 struct X<T,R>(T, R);
1193
1194 impl<T> A<T> {
1195 fn new(t: T) -> Self { A(t) }
1196 fn into_b(self) -> B<T> { B(self.0) }
1197 }
1198 impl<T> B<T> {
1199 fn into_c(self) -> C<T> { C(self.0) }
1200 }
1201 fn main() {
1202 let c = A::new(X(42, true))
1203 .into_b()
1204 .into_c();
1205 }"#,
1206 );
1207 assert_debug_snapshot!(analysis.inlay_hints(file_id, &InlayHintsOptions{ parameter_hints: false, type_hints: false, chaining_hints: true, max_length: None}).unwrap(), @r###"
1208 [
1209 InlayHint {
1210 range: [403; 452),
1211 kind: ChainingHint,
1212 label: "B<X<i32, bool>>",
1213 },
1214 InlayHint {
1215 range: [403; 422),
1216 kind: ChainingHint,
1217 label: "A<X<i32, bool>>",
1218 },
1219 ]"###);
1220 }
1055} 1221}
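
To make the new hint kind concrete, this is roughly where the chaining hints from the tests
above would render (an illustration, not tool output): a hint appears after each expression
that is followed by a newline and then a `.`, and the terminal call gets none.

    fn main() {
        let c = A(B(C))    // chaining hint `A` rendered here
            .into_b()      // chaining hint `B` rendered here
            .into_c();     // no hint: nothing chains onto this expression
    }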
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index fcb73fbc7..e3cde9eed 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -3,12 +3,11 @@
 use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
     ast::{self, make::tokens::doc_comment},
-    tokenize, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind,
+    tokenize, AstToken, Parse, SmolStr, SyntaxKind,
     SyntaxKind::*,
-    SyntaxNode, SyntaxTreeBuilder, TextRange, TextUnit, Token, T,
+    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextUnit, Token as RawToken, T,
 };
 use rustc_hash::FxHashMap;
-use std::iter::successors;
 use tt::buffer::{Cursor, TokenBuffer};
 
 use crate::subtree_source::SubtreeTokenSource;
@@ -50,10 +49,8 @@ pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenM
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
     let global_offset = node.text_range().start();
-    let mut c = Convertor {
-        id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
-    };
-    let subtree = c.go(node)?;
+    let mut c = Convertor::new(node, global_offset);
+    let subtree = c.go()?;
     Some((subtree, c.id_alloc.map))
 }
 
@@ -152,6 +149,10 @@ impl TokenMap {
             }
         }
     }
+
+    fn remove_delim(&mut self, token_id: tt::TokenId) {
+        self.entries.retain(|(tid, _)| *tid != token_id);
+    }
 }
 
 /// Returns the textual content of a doc comment block as a quoted string
@@ -237,25 +238,26 @@ impl TokenIdAlloc {
         token_id
     }
 
-    fn delim(&mut self, open_abs_range: TextRange, close_abs_range: TextRange) -> tt::TokenId {
-        let open_relative_range = open_abs_range - self.global_offset;
-        let close_relative_range = close_abs_range - self.global_offset;
-        let token_id = tt::TokenId(self.next_id);
-        self.next_id += 1;
-
-        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
-        token_id
-    }
-
     fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
-        self.map.insert_delim(token_id, open_abs_range, open_abs_range);
+        self.map.insert_delim(
+            token_id,
+            open_abs_range - self.global_offset,
+            open_abs_range - self.global_offset,
+        );
         token_id
     }
 
-    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: TextRange) {
-        self.map.update_close_delim(id, close_abs_range);
+    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
+        match close_abs_range {
+            None => {
+                self.map.remove_delim(id);
+            }
+            Some(close) => {
+                self.map.update_close_delim(id, close - self.global_offset);
+            }
+        }
     }
 }
 
@@ -264,10 +266,20 @@ struct RawConvertor<'a> {
     text: &'a str,
     offset: TextUnit,
     id_alloc: TokenIdAlloc,
-    inner: std::slice::Iter<'a, Token>,
+    inner: std::slice::Iter<'a, RawToken>,
 }
 
-impl RawConvertor<'_> {
+trait SrcToken {
+    fn kind(&self) -> SyntaxKind;
+
+    fn to_char(&self) -> Option<char>;
+
+    fn to_text(&self) -> SmolStr;
+}
+
+trait TokenConvertor {
+    type Token: SrcToken;
+
     fn go(&mut self) -> Option<tt::Subtree> {
         let mut subtree = tt::Subtree::default();
         subtree.delimiter = None;
@@ -285,33 +297,22 @@ impl RawConvertor<'_> {
         Some(subtree)
     }
 
-    fn bump(&mut self) -> Option<(Token, TextRange)> {
-        let token = self.inner.next()?;
-        let range = TextRange::offset_len(self.offset, token.len);
-        self.offset += token.len;
-        Some((*token, range))
-    }
-
-    fn peek(&self) -> Option<Token> {
-        self.inner.as_slice().get(0).cloned()
-    }
-
     fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
         let (token, range) = match self.bump() {
             None => return,
             Some(it) => it,
         };
 
-        let k: SyntaxKind = token.kind;
+        let k: SyntaxKind = token.kind();
         if k == COMMENT {
-            let node = doc_comment(&self.text[range]);
-            if let Some(tokens) = convert_doc_comment(&node) {
+            if let Some(tokens) = self.convert_doc_comment(&token) {
                 result.extend(tokens);
             }
             return;
         }
 
         result.push(if k.is_punct() {
+            assert_eq!(range.len().to_usize(), 1);
             let delim = match k {
                 T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
                 T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
@@ -321,40 +322,51 @@ impl RawConvertor<'_> {
 
             if let Some((kind, closed)) = delim {
                 let mut subtree = tt::Subtree::default();
-                let id = self.id_alloc.open_delim(range);
+                let id = self.id_alloc().open_delim(range);
                 subtree.delimiter = Some(tt::Delimiter { kind, id });
 
-                while self.peek().map(|it| it.kind != closed).unwrap_or(false) {
+                while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
                     self.collect_leaf(&mut subtree.token_trees);
                 }
                 let last_range = match self.bump() {
-                    None => return,
+                    None => {
+                        // For error resilience, we insert an char punct for the opening delim here
+                        self.id_alloc().close_delim(id, None);
+                        let leaf: tt::Leaf = tt::Punct {
+                            id: self.id_alloc().alloc(range),
+                            char: token.to_char().unwrap(),
+                            spacing: tt::Spacing::Alone,
+                        }
+                        .into();
+                        result.push(leaf.into());
+                        result.extend(subtree.token_trees);
+                        return;
+                    }
                     Some(it) => it.1,
                 };
-                self.id_alloc.close_delim(id, last_range);
+                self.id_alloc().close_delim(id, Some(last_range));
                 subtree.into()
             } else {
                 let spacing = match self.peek() {
                     Some(next)
-                        if next.kind.is_trivia()
-                            || next.kind == T!['[']
-                            || next.kind == T!['{']
-                            || next.kind == T!['('] =>
+                        if next.kind().is_trivia()
+                            || next.kind() == T!['[']
+                            || next.kind() == T!['{']
+                            || next.kind() == T!['('] =>
                     {
                         tt::Spacing::Alone
                     }
-                    Some(next) if next.kind.is_punct() => tt::Spacing::Joint,
+                    Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
                     _ => tt::Spacing::Alone,
                 };
-                let char =
-                    self.text[range].chars().next().expect("Token from lexer must be single char");
+                let char = token.to_char().expect("Token from lexer must be single char");
 
-                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc.alloc(range) }).into()
+                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
             }
         } else {
             macro_rules! make_leaf {
                 ($i:ident) => {
-                    tt::$i { id: self.id_alloc.alloc(range), text: self.text[range].into() }.into()
+                    tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into()
                 };
             }
             let leaf: tt::Leaf = match k {
@@ -368,133 +380,168 @@ impl RawConvertor<'_> {
             leaf.into()
         });
     }
+
+    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
+
+    fn peek(&self) -> Option<Self::Token>;
+
+    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+}
+
+impl<'a> SrcToken for (RawToken, &'a str) {
+    fn kind(&self) -> SyntaxKind {
+        self.0.kind
+    }
+
+    fn to_char(&self) -> Option<char> {
+        self.1.chars().next()
+    }
+
+    fn to_text(&self) -> SmolStr {
+        self.1.into()
+    }
+}
+
+impl RawConvertor<'_> {}
+
+impl<'a> TokenConvertor for RawConvertor<'a> {
+    type Token = (RawToken, &'a str);
+
+    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+        convert_doc_comment(&doc_comment(token.1))
+    }
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+        let token = self.inner.next()?;
+        let range = TextRange::offset_len(self.offset, token.len);
+        self.offset += token.len;
+
+        Some(((*token, &self.text[range]), range))
+    }
+
+    fn peek(&self) -> Option<Self::Token> {
+        let token = self.inner.as_slice().get(0).cloned();
+
+        token.map(|it| {
+            let range = TextRange::offset_len(self.offset, it.len);
+            (it, &self.text[range])
+        })
+    }
+
+    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+        &mut self.id_alloc
+    }
 }
 
-// FIXME: There are some duplicate logic between RawConvertor and Convertor
-// It would be nice to refactor to converting SyntaxNode to ra_parser::Token and thus
-// use RawConvertor directly. But performance-wise it may not be a good idea ?
 struct Convertor {
     id_alloc: TokenIdAlloc,
+    current: Option<SyntaxToken>,
+    range: TextRange,
+    punct_offset: Option<(SyntaxToken, TextUnit)>,
 }
 
 impl Convertor {
-    fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
-        // This tree is empty
-        if tt.first_child_or_token().is_none() {
-            return Some(tt::Subtree { token_trees: vec![], delimiter: None });
+    fn new(node: &SyntaxNode, global_offset: TextUnit) -> Convertor {
+        Convertor {
+            id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
+            current: node.first_token(),
+            range: node.text_range(),
+            punct_offset: None,
         }
+    }
+}
 
-        let first_child = tt.first_child_or_token()?;
-        let last_child = tt.last_child_or_token()?;
+enum SynToken {
+    Ordiniary(SyntaxToken),
+    Punch(SyntaxToken, TextUnit),
+}
 
-        // ignore trivial first_child and last_child
-        let first_child = successors(Some(first_child), |it| {
-            if it.kind().is_trivia() {
-                it.next_sibling_or_token()
-            } else {
-                None
-            }
-        })
-        .last()
-        .unwrap();
-        if first_child.kind().is_trivia() {
-            return Some(tt::Subtree { token_trees: vec![], delimiter: None });
+impl SynToken {
+    fn token(&self) -> &SyntaxToken {
+        match self {
+            SynToken::Ordiniary(it) => it,
+            SynToken::Punch(it, _) => it,
         }
+    }
+}
 
-        let last_child = successors(Some(last_child), |it| {
-            if it.kind().is_trivia() {
-                it.prev_sibling_or_token()
-            } else {
-                None
-            }
-        })
-        .last()
-        .unwrap();
-
-        let (delimiter_kind, skip_first) = match (first_child.kind(), last_child.kind()) {
-            (T!['('], T![')']) => (Some(tt::DelimiterKind::Parenthesis), true),
-            (T!['{'], T!['}']) => (Some(tt::DelimiterKind::Brace), true),
-            (T!['['], T![']']) => (Some(tt::DelimiterKind::Bracket), true),
-            _ => (None, false),
+impl SrcToken for SynToken {
+    fn kind(&self) -> SyntaxKind {
+        self.token().kind()
+    }
+    fn to_char(&self) -> Option<char> {
+        match self {
+            SynToken::Ordiniary(_) => None,
+            SynToken::Punch(it, i) => it.text().chars().nth(i.to_usize()),
+        }
+    }
+    fn to_text(&self) -> SmolStr {
+        self.token().text().clone()
+    }
+}
+
+impl TokenConvertor for Convertor {
+    type Token = SynToken;
+    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+        convert_doc_comment(token.token())
+    }
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+        if let Some((punct, offset)) = self.punct_offset.clone() {
+            if offset.to_usize() + 1 < punct.text().len() {
+                let offset = offset + TextUnit::from_usize(1);
+                let range = punct.text_range();
+                self.punct_offset = Some((punct.clone(), offset));
+                let range = TextRange::offset_len(range.start() + offset, TextUnit::from_usize(1));
+                return Some((SynToken::Punch(punct, offset), range));
+            }
+        }
+
+        let curr = self.current.clone()?;
+        if !curr.text_range().is_subrange(&self.range) {
+            return None;
+        }
+        self.current = curr.next_token();
+
+        let token = if curr.kind().is_punct() {
+            let range = curr.text_range();
+            let range = TextRange::offset_len(range.start(), TextUnit::from_usize(1));
+            self.punct_offset = Some((curr.clone(), TextUnit::from_usize(0)));
+            (SynToken::Punch(curr, TextUnit::from_usize(0)), range)
+        } else {
+            self.punct_offset = None;
+            let range = curr.text_range();
+            (SynToken::Ordiniary(curr), range)
         };
-        let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
-            kind,
-            id: self.id_alloc.delim(first_child.text_range(), last_child.text_range()),
-        });
 
-        let mut token_trees = Vec::new();
-        let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
+        Some(token)
+    }
 
-        while let Some(child) = child_iter.next() {
-            if skip_first && (child == first_child || child == last_child) {
-                continue;
-            }
-
-            match child {
-                NodeOrToken::Token(token) => {
-                    if let Some(doc_tokens) = convert_doc_comment(&token) {
-                        token_trees.extend(doc_tokens);
-                    } else if token.kind().is_trivia() {
-                        continue;
-                    } else if token.kind().is_punct() {
-                        // we need to pull apart joined punctuation tokens
-                        let last_spacing = match child_iter.peek() {
-                            Some(NodeOrToken::Token(token)) => {
-                                if token.kind().is_punct() {
-                                    tt::Spacing::Joint
-                                } else {
-                                    tt::Spacing::Alone
-                                }
-                            }
-                            _ => tt::Spacing::Alone,
-                        };
-                        let spacing_iter = std::iter::repeat(tt::Spacing::Joint)
-                            .take(token.text().len() - 1)
-                            .chain(std::iter::once(last_spacing));
-                        for (char, spacing) in token.text().chars().zip(spacing_iter) {
-                            token_trees.push(
-                                tt::Leaf::from(tt::Punct {
-                                    char,
-                                    spacing,
-                                    id: self.id_alloc.alloc(token.text_range()),
-                                })
-                                .into(),
-                            );
-                        }
-                    } else {
-                        macro_rules! make_leaf {
-                            ($i:ident) => {
-                                tt::$i {
-                                    id: self.id_alloc.alloc(token.text_range()),
-                                    text: token.text().clone(),
-                                }
-                                .into()
-                            };
-                        }
-
-                        let child: tt::Leaf = match token.kind() {
-                            T![true] | T![false] => make_leaf!(Literal),
-                            IDENT | LIFETIME => make_leaf!(Ident),
-                            k if k.is_keyword() => make_leaf!(Ident),
-                            k if k.is_literal() => make_leaf!(Literal),
-                            _ => return None,
-                        };
-                        token_trees.push(child.into());
-                    }
-                }
-                NodeOrToken::Node(node) => {
-                    let child_subtree = self.go(&node)?;
-                    if child_subtree.delimiter.is_none() && node.kind() != SyntaxKind::TOKEN_TREE {
-                        token_trees.extend(child_subtree.token_trees);
-                    } else {
-                        token_trees.push(child_subtree.into());
-                    }
-                }
-            };
-        }
-
-        let res = tt::Subtree { delimiter, token_trees };
-        Some(res)
+    fn peek(&self) -> Option<Self::Token> {
+        if let Some((punct, mut offset)) = self.punct_offset.clone() {
+            offset = offset + TextUnit::from_usize(1);
+            if offset.to_usize() < punct.text().len() {
+                return Some(SynToken::Punch(punct, offset));
+            }
+        }
+
+        let curr = self.current.clone()?;
+        if !curr.text_range().is_subrange(&self.range) {
+            return None;
+        }
+
+        let token = if curr.kind().is_punct() {
+            SynToken::Punch(curr, TextUnit::from_usize(0))
+        } else {
+            SynToken::Ordiniary(curr)
+        };
+        Some(token)
+    }
+
+    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+        &mut self.id_alloc
     }
 }
 
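
A detail worth spelling out: the new `Convertor` walks `SyntaxToken`s directly and splits a
multi-character punct such as `::` into single-character `Punch` tokens, tracking its position
in `punct_offset`. The spacing convention is the one the deleted `go` loop used: every char but
the last is `Joint`, and the last is `Joint` only if the next token is itself a punct. A
self-contained sketch of just that rule (all names are local to the sketch):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Spacing {
        Alone,
        Joint,
    }

    // Split a joined punct token into single chars with tt-style spacing.
    fn split_punct(text: &str, next_is_punct: bool) -> Vec<(char, Spacing)> {
        let n = text.chars().count();
        text.chars()
            .enumerate()
            .map(|(i, c)| {
                let joined = i + 1 < n || next_is_punct;
                (c, if joined { Spacing::Joint } else { Spacing::Alone })
            })
            .collect()
    }

    fn main() {
        // `::` followed by an identifier: first `:` is Joint, second Alone.
        assert_eq!(
            split_punct("::", false),
            vec![(':', Spacing::Joint), (':', Spacing::Alone)]
        );
    }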
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 44f381938..a7fcea0ac 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -427,22 +427,28 @@ MACRO_ITEMS@[0; 40)
     );
 }
 
-#[test]
-fn test_expand_literals_to_token_tree() {
-    fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
-        if let tt::TokenTree::Subtree(subtree) = tt {
-            return &subtree;
-        }
-        unreachable!("It is not a subtree");
+fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
+    if let tt::TokenTree::Subtree(subtree) = tt {
+        return &subtree;
     }
+    unreachable!("It is not a subtree");
+}
 
-    fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
-        if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
-            return lit;
-        }
-        unreachable!("It is not a literal");
+fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
+    if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
+        return lit;
     }
+    unreachable!("It is not a literal");
+}
 
+fn to_punct(tt: &tt::TokenTree) -> &tt::Punct {
+    if let tt::TokenTree::Leaf(tt::Leaf::Punct(lit)) = tt {
+        return lit;
+    }
+    unreachable!("It is not a Punct");
+}
+
+#[test]
+fn test_expand_literals_to_token_tree() {
     let expansion = parse_macro(
         r#"
         macro_rules! literals {
@@ -471,6 +477,22 @@ fn test_expand_literals_to_token_tree() {
 }
 
 #[test]
+fn test_attr_to_token_tree() {
+    let expansion = parse_to_token_tree_by_syntax(
+        r#"
+        #[derive(Copy)]
+        struct Foo;
+        "#,
+    );
+
+    assert_eq!(to_punct(&expansion.token_trees[0]).char, '#');
+    assert_eq!(
+        to_subtree(&expansion.token_trees[1]).delimiter_kind(),
+        Some(tt::DelimiterKind::Bracket)
+    );
+}
+
+#[test]
 fn test_two_idents() {
     parse_macro(
         r#"
@@ -1427,8 +1449,8 @@ impl MacroFixture {
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (invocation_tt, _) =
-            ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap())
+            .ok_or_else(|| ExpandError::ConversionError)?;
 
         self.rules.expand(&invocation_tt).result()
     }
@@ -1517,6 +1539,16 @@ pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture {
     MacroFixture { rules }
 }
 
+pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree {
+    let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
+    let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0;
+
+    let parsed = parse_to_token_tree(ra_fixture).unwrap().0;
+    assert_eq!(tt, parsed);
+
+    parsed
+}
+
 fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
     let mut level = 0;
     let mut buf = String::new();
@@ -1662,5 +1694,5 @@ fn test_expand_bad_literal() {
         macro_rules! foo { ($i:literal) => {}; }
     "#,
     )
-    .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".to_string()));
+    .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".into()));
 }
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index ffdbdc767..4d463a3ef 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,6 +1,9 @@
 //! FIXME: write short doc here
 
-use std::ops::RangeInclusive;
+use std::{
+    fmt,
+    ops::{self, RangeInclusive},
+};
 
 use itertools::Itertools;
 use ra_text_edit::TextEditBuilder;
@@ -222,44 +225,121 @@ fn _replace_children(
     with_children(parent, new_children)
 }
 
-/// Replaces descendants in the node, according to the mapping.
-///
-/// This is a type-unsafe low-level editing API, if you need to use it, prefer
-/// to create a type-safe abstraction on top of it instead.
-pub fn replace_descendants(
-    parent: &SyntaxNode,
-    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    _replace_descendants(parent, &map)
+#[derive(Default)]
+pub struct SyntaxRewriter<'a> {
+    f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>,
+    //FIXME: add debug_assertions that all elements are in fact from the same file.
+    replacements: FxHashMap<SyntaxElement, Replacement>,
 }
 
-fn _replace_descendants(
-    parent: &SyntaxNode,
-    map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-) -> SyntaxNode {
-    // FIXME: this could be made much faster.
-    let new_children = parent.children_with_tokens().map(|it| go(map, it)).collect::<Vec<_>>();
-    return with_children(parent, new_children);
+impl fmt::Debug for SyntaxRewriter<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish()
+    }
+}
 
-    fn go(
-        map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
-        element: SyntaxElement,
-    ) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
-        if let Some(replacement) = map(&element) {
+impl<'a> SyntaxRewriter<'a> {
+    pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> {
+        SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() }
+    }
+    pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Delete;
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Single(with.clone().into());
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) {
+        self.replace(what.syntax(), with.syntax())
+    }
+
+    pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
+        if self.f.is_none() && self.replacements.is_empty() {
+            return node.clone();
+        }
+        self.rewrite_children(node)
+    }
+
+    pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N {
+        N::cast(self.rewrite(node.syntax())).unwrap()
+    }
+
+    pub fn rewrite_root(&self) -> Option<SyntaxNode> {
+        assert!(self.f.is_none());
+        self.replacements
+            .keys()
+            .map(|element| match element {
+                SyntaxElement::Node(it) => it.clone(),
+                SyntaxElement::Token(it) => it.parent(),
+            })
+            .fold1(|a, b| least_common_ancestor(&a, &b).unwrap())
+    }
+
+    fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
+        if let Some(f) = &self.f {
+            assert!(self.replacements.is_empty());
+            return f(element).map(Replacement::Single);
+        }
+        self.replacements.get(element).cloned()
+    }
+
+    fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode {
+        // FIXME: this could be made much faster.
+        let new_children =
+            node.children_with_tokens().flat_map(|it| self.rewrite_self(&it)).collect::<Vec<_>>();
+        with_children(node, new_children)
+    }
+
+    fn rewrite_self(
+        &self,
+        element: &SyntaxElement,
+    ) -> Option<NodeOrToken<rowan::GreenNode, rowan::GreenToken>> {
+        if let Some(replacement) = self.replacement(&element) {
             return match replacement {
-                NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
-                NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+                Replacement::Single(NodeOrToken::Node(it)) => {
+                    Some(NodeOrToken::Node(it.green().clone()))
+                }
+                Replacement::Single(NodeOrToken::Token(it)) => {
+                    Some(NodeOrToken::Token(it.green().clone()))
+                }
+                Replacement::Delete => None,
             };
         }
-        match element {
-            NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
-            NodeOrToken::Node(it) => {
-                NodeOrToken::Node(_replace_descendants(&it, map).green().clone())
-            }
-        }
+        let res = match element {
+            NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+            NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()),
+        };
+        Some(res)
+    }
+}
+
+impl ops::AddAssign for SyntaxRewriter<'_> {
+    fn add_assign(&mut self, rhs: SyntaxRewriter) {
+        assert!(rhs.f.is_none());
+        self.replacements.extend(rhs.replacements)
     }
 }
 
+#[derive(Clone, Debug)]
+enum Replacement {
+    Delete,
+    Single(SyntaxElement),
+}
+
+/// Replaces descendants in the node, according to the mapping.
+///
+/// This is a type-unsafe low-level editing API, if you need to use it, prefer
+/// to create a type-safe abstraction on top of it instead.
+pub fn replace_descendants(
+    parent: &SyntaxNode,
+    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
+) -> SyntaxNode {
+    SyntaxRewriter::from_fn(map).rewrite(parent)
+}
+
 fn with_children(
     parent: &SyntaxNode,
     new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
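
Taken together, `SyntaxRewriter` has two mutually exclusive modes: closure mode (`from_fn`, as
in ast_transform.rs above) and explicit mode, where replacements and deletions are registered up
front and `rewrite_root` can recover the least common ancestor of everything touched. A minimal
sketch of the explicit mode (the `strip_comments` helper is hypothetical):

    use ra_syntax::{algo::SyntaxRewriter, AstNode, SourceFile, SyntaxKind};

    // Register one deletion per comment token, then rebuild the tree once.
    fn strip_comments(file: &SourceFile) -> SourceFile {
        let mut rewriter = SyntaxRewriter::default();
        for token in file.syntax().descendants_with_tokens().filter_map(|it| it.into_token()) {
            if token.kind() == SyntaxKind::COMMENT {
                rewriter.delete(&token);
            }
        }
        SourceFile::cast(rewriter.rewrite(file.syntax())).unwrap()
    }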
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 324327162..f74c9f9c6 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -4,7 +4,6 @@
 use std::{iter, ops::RangeInclusive};
 
 use arrayvec::ArrayVec;
-use rustc_hash::FxHashMap;
 
 use crate::{
     algo,
@@ -17,6 +16,7 @@ use crate::{
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken, T,
 };
+use algo::{neighbor, SyntaxRewriter};
 
 impl ast::BinExpr {
     #[must_use]
@@ -255,6 +255,28 @@ impl ast::UseItem {
         }
         self.clone()
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        let next_ws = self
+            .syntax()
+            .next_sibling_or_token()
+            .and_then(|it| it.into_token())
+            .and_then(ast::Whitespace::cast);
+        if let Some(next_ws) = next_ws {
+            let ws_text = next_ws.syntax().text();
+            if ws_text.starts_with('\n') {
+                let rest = &ws_text[1..];
+                if rest.is_empty() {
+                    res.delete(next_ws.syntax())
+                } else {
+                    res.replace(next_ws.syntax(), &make::tokens::whitespace(rest));
+                }
+            }
+        }
+        res
+    }
 }
 
 impl ast::UseTree {
@@ -293,6 +315,22 @@ impl ast::UseTree {
             Some(res)
         }
     }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        for &dir in [Direction::Next, Direction::Prev].iter() {
+            if let Some(nb) = neighbor(self, dir) {
+                self.syntax()
+                    .siblings_with_tokens(dir)
+                    .skip(1)
+                    .take_while(|it| it.as_node() != Some(nb.syntax()))
+                    .for_each(|el| res.delete(&el));
+                return res;
+            }
+        }
+        res
+    }
 }
 
 #[must_use]
@@ -343,28 +381,24 @@ impl IndentLevel {
     }
 
     fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(&format!(
-                        "{}{:width$}",
-                        ws.syntax().text(),
-                        "",
-                        width = self.0 as usize * 4
-                    ))
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(&format!(
+                    "{}{:width$}",
+                    ws.syntax().text(),
+                    "",
+                    width = self.0 as usize * 4
+                ));
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 
     pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
@@ -372,27 +406,21 @@ impl IndentLevel {
     }
 
     fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
-        let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node
-            .descendants_with_tokens()
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
             .filter_map(|el| el.into_token())
             .filter_map(ast::Whitespace::cast)
             .filter(|ws| {
                 let text = ws.syntax().text();
                 text.contains('\n')
             })
-            .map(|ws| {
-                (
-                    ws.syntax().clone().into(),
-                    make::tokens::whitespace(
-                        &ws.syntax()
-                            .text()
-                            .replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
-                    )
-                    .into(),
-                )
-            })
-            .collect();
-        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(
+                    &ws.syntax().text().replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
+                );
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
     }
 }
 
@@ -442,12 +470,11 @@ pub trait AstNodeEdit: AstNode + Sized {
         &self,
         replacement_map: impl IntoIterator<Item = (D, D)>,
     ) -> Self {
-        let map = replacement_map
-            .into_iter()
-            .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
-            .collect::<FxHashMap<SyntaxElement, _>>();
-        let new_syntax = algo::replace_descendants(self.syntax(), |n| map.get(n).cloned());
-        Self::cast(new_syntax).unwrap()
+        let mut rewriter = SyntaxRewriter::default();
+        for (from, to) in replacement_map {
+            rewriter.replace(from.syntax(), to.syntax())
+        }
+        rewriter.rewrite_ast(self)
     }
 }
 
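
Design note: the new `remove` helpers do not edit anything eagerly. Each returns a
`SyntaxRewriter` describing the deletion plus surrounding whitespace/separator cleanup, and the
caller decides which root to apply it to, or merges it into a larger rewrite with `+=`. A
hypothetical usage sketch:

    use ra_syntax::{ast, AstNode, SourceFile};

    // Drop the first `use` item in a file, letting `remove` decide how
    // much trailing whitespace goes with it.
    fn without_first_import(file: &SourceFile) -> SourceFile {
        match file.syntax().descendants().find_map(ast::UseItem::cast) {
            Some(item) => item.remove().rewrite_ast(file),
            None => file.clone(),
        }
    }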
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 103b2b53c..628ed107e 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -34,6 +34,8 @@ pub struct ServerConfig {
     pub inlay_hints_type: bool,
     #[serde(deserialize_with = "nullable_bool_true")]
     pub inlay_hints_parameter: bool,
+    #[serde(deserialize_with = "nullable_bool_true")]
+    pub inlay_hints_chaining: bool,
     pub inlay_hints_max_length: Option<usize>,
 
     pub cargo_watch_enable: bool,
@@ -66,6 +68,7 @@ impl Default for ServerConfig {
             lru_capacity: None,
             inlay_hints_type: true,
             inlay_hints_parameter: true,
+            inlay_hints_chaining: true,
             inlay_hints_max_length: None,
             cargo_watch_enable: true,
             cargo_watch_args: Vec::new(),
diff --git a/crates/rust-analyzer/src/conv.rs b/crates/rust-analyzer/src/conv.rs
index fd4657d7e..6edc03fe0 100644
--- a/crates/rust-analyzer/src/conv.rs
+++ b/crates/rust-analyzer/src/conv.rs
@@ -332,6 +332,7 @@ impl ConvWith<&LineIndex> for InlayHint {
             kind: match self.kind {
                 InlayKind::ParameterHint => req::InlayKind::ParameterHint,
                 InlayKind::TypeHint => req::InlayKind::TypeHint,
+                InlayKind::ChainingHint => req::InlayKind::ChainingHint,
             },
         }
     }
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index a8a5894d2..7825b0077 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -183,6 +183,7 @@ pub fn main_loop(
             inlay_hints: InlayHintsOptions {
                 type_hints: config.inlay_hints_type,
                 parameter_hints: config.inlay_hints_parameter,
+                chaining_hints: config.inlay_hints_chaining,
                 max_length: config.inlay_hints_max_length,
             },
             cargo_watch: CheckOptions {
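
With this wiring, each hint class can be toggled independently. The struct built above looks
like this in client code (a sketch; the values are arbitrary, and `ra_ide`'s re-export of
`InlayHintsOptions` is assumed, as main_loop.rs relies on it):

    use ra_ide::InlayHintsOptions;

    fn hints_options() -> InlayHintsOptions {
        InlayHintsOptions {
            type_hints: true,
            parameter_hints: true,
            chaining_hints: false, // opt out of only the new hint kind
            max_length: Some(20),
        }
    }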
diff --git a/crates/rust-analyzer/src/req.rs b/crates/rust-analyzer/src/req.rs
index 9e27d3f1c..8557294f6 100644
--- a/crates/rust-analyzer/src/req.rs
+++ b/crates/rust-analyzer/src/req.rs
@@ -200,6 +200,7 @@ pub struct InlayHintsParams {
 pub enum InlayKind {
     TypeHint,
     ParameterHint,
+    ChainingHint,
 }
 
 #[derive(Debug, Deserialize, Serialize)]