-rw-r--r--  Cargo.lock                                                                    2
-rw-r--r--  crates/ra_assists/Cargo.toml                                                  2
-rw-r--r--  crates/ra_assists/src/add_missing_impl_members.rs                           149
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs                                           4
-rw-r--r--  crates/ra_assists/src/ast_editor.rs                                         333
-rw-r--r--  crates/ra_assists/src/fill_struct_fields.rs                                 142
-rw-r--r--  crates/ra_assists/src/lib.rs                                                  3
-rw-r--r--  crates/ra_hir/src/ids.rs                                                      7
-rw-r--r--  crates/ra_hir/src/nameres.rs                                                 16
-rw-r--r--  crates/ra_hir/src/nameres/collector.rs                                      165
-rw-r--r--  crates/ra_ide_api/src/completion/complete_postfix.rs                          2
-rw-r--r--  crates/ra_ide_api/src/completion/snapshots/completion_item__postfix_completion_works_for_trivial_path_expression.snap  6
-rw-r--r--  crates/ra_mbe/src/lib.rs                                                    101
-rw-r--r--  crates/ra_mbe/src/subtree_parser.rs                                          13
-rw-r--r--  crates/ra_syntax/src/ast/extensions.rs                                        9
-rw-r--r--  crates/ra_syntax/src/lib.rs                                                   2
-rw-r--r--  crates/ra_syntax/src/ptr.rs                                                   2
-rw-r--r--  crates/ra_syntax/src/syntax_node.rs                                         109
-rw-r--r--  crates/ra_tt/src/lib.rs                                                      16
19 files changed, 869 insertions, 214 deletions
diff --git a/Cargo.lock b/Cargo.lock
index e5729f968..3be73d66d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -903,8 +903,10 @@ version = "0.1.0"
 name = "ra_assists"
 version = "0.1.0"
 dependencies = [
+ "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_db 0.1.0",
  "ra_fmt 0.1.0",
  "ra_hir 0.1.0",
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml
index d4056a349..29d9ceb59 100644
--- a/crates/ra_assists/Cargo.toml
+++ b/crates/ra_assists/Cargo.toml
@@ -5,8 +5,10 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
 [dependencies]
+lazy_static = "1.3.0"
 join_to_string = "0.1.3"
 itertools = "0.8.0"
+arrayvec = "0.4.10"
 
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
diff --git a/crates/ra_assists/src/add_missing_impl_members.rs b/crates/ra_assists/src/add_missing_impl_members.rs
index c82447b84..17c2af899 100644
--- a/crates/ra_assists/src/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/add_missing_impl_members.rs
@@ -1,14 +1,9 @@
-use std::fmt::Write;
-
-use crate::{Assist, AssistId, AssistCtx};
+use crate::{Assist, AssistId, AssistCtx, ast_editor::{AstEditor, AstBuilder}};
 
 use hir::db::HirDatabase;
-use ra_syntax::{SmolStr, SyntaxKind, TextRange, TextUnit, TreeArc};
-use ra_syntax::ast::{self, AstNode, AstToken, FnDef, ImplItem, ImplItemKind, NameOwner};
+use ra_syntax::{SmolStr, TreeArc};
+use ra_syntax::ast::{self, AstNode, FnDef, ImplItem, ImplItemKind, NameOwner};
 use ra_db::FilePosition;
-use ra_fmt::{leading_indent, reindent};
-
-use itertools::Itertools;
 
 enum AddMissingImplMembersMode {
     DefaultMethodsOnly,
@@ -76,48 +71,35 @@ fn add_missing_impl_members_inner(
     }
 
     ctx.add_action(AssistId(assist_id), label, |edit| {
-        let (parent_indent, indent) = {
-            // FIXME: Find a way to get the indent already used in the file.
-            // Now, we copy the indent of first item or indent with 4 spaces relative to impl block
-            const DEFAULT_INDENT: &str = "    ";
-            let first_item = impl_item_list.impl_items().next();
-            let first_item_indent =
-                first_item.and_then(|i| leading_indent(i.syntax())).map(ToOwned::to_owned);
-            let impl_block_indent = leading_indent(impl_node.syntax()).unwrap_or_default();
-
-            (
-                impl_block_indent.to_owned(),
-                first_item_indent.unwrap_or_else(|| impl_block_indent.to_owned() + DEFAULT_INDENT),
-            )
-        };
-
-        let changed_range = {
-            let children = impl_item_list.syntax().children_with_tokens();
-            let last_whitespace =
-                children.filter_map(|it| ast::Whitespace::cast(it.as_token()?)).last();
-
-            last_whitespace.map(|w| w.syntax().range()).unwrap_or_else(|| {
-                let in_brackets = impl_item_list.syntax().range().end() - TextUnit::of_str("}");
-                TextRange::from_to(in_brackets, in_brackets)
-            })
-        };
-
-        let func_bodies = format!("\n{}", missing_fns.into_iter().map(build_func_body).join("\n"));
-        let trailing_whitespace = format!("\n{}", parent_indent);
-        let func_bodies = reindent(&func_bodies, &indent) + &trailing_whitespace;
-
-        let replaced_text_range = TextUnit::of_str(&func_bodies);
-
-        edit.replace(changed_range, func_bodies);
-        // FIXME: place the cursor on the first unimplemented?
-        edit.set_cursor(
-            changed_range.start() + replaced_text_range - TextUnit::of_str(&trailing_whitespace),
-        );
+        let n_existing_items = impl_item_list.impl_items().count();
+        let fns = missing_fns.into_iter().map(add_body_and_strip_docstring).collect::<Vec<_>>();
+
+        let mut ast_editor = AstEditor::new(impl_item_list);
+        if n_existing_items == 0 {
+            ast_editor.make_multiline();
+        }
+        ast_editor.append_functions(fns.iter().map(|it| &**it));
+        let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap();
+        let cursor_poisition = first_new_item.syntax().range().start();
+        ast_editor.into_text_edit(edit.text_edit_builder());
+
+        edit.set_cursor(cursor_poisition);
     });
 
     ctx.build()
 }
 
+fn add_body_and_strip_docstring(fn_def: &ast::FnDef) -> TreeArc<ast::FnDef> {
+    let mut ast_editor = AstEditor::new(fn_def);
+    if fn_def.body().is_none() {
+        ast_editor.set_body(&AstBuilder::<ast::Block>::single_expr(
+            &AstBuilder::<ast::Expr>::unimplemented(),
+        ));
+    }
+    ast_editor.strip_attrs_and_docs();
+    ast_editor.ast().to_owned()
+}
+
 /// Given an `ast::ImplBlock`, resolves the target trait (the one being
 /// implemented) to a `ast::TraitDef`.
 fn resolve_target_trait_def(
@@ -134,22 +116,6 @@ fn resolve_target_trait_def(
     }
 }
 
-fn build_func_body(def: &ast::FnDef) -> String {
-    let mut buf = String::new();
-
-    for child in def.syntax().children_with_tokens() {
-        match (child.prev_sibling_or_token().map(|c| c.kind()), child.kind()) {
-            (_, SyntaxKind::SEMI) => buf.push_str(" {\n    unimplemented!()\n}"),
-            (_, SyntaxKind::ATTR) | (_, SyntaxKind::COMMENT) => {}
-            (Some(SyntaxKind::ATTR), SyntaxKind::WHITESPACE)
-            | (Some(SyntaxKind::COMMENT), SyntaxKind::WHITESPACE) => {}
-            _ => write!(buf, "{}", child).unwrap(),
-        };
-    }
-
-    buf.trim_end().to_string()
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -170,7 +136,7 @@ struct S;
 
 impl Foo for S {
     fn bar(&self) {}
-    <|>
+<|>
 }",
             "
 trait Foo {
@@ -183,12 +149,9 @@ struct S;
 
 impl Foo for S {
     fn bar(&self) {}
-    fn foo(&self) {
-        unimplemented!()
-    }
-    fn baz(&self) {
-        unimplemented!()
-    }<|>
+    <|>fn foo(&self) { unimplemented!() }
+    fn baz(&self) { unimplemented!() }
+
 }",
         );
     }
@@ -208,7 +171,7 @@ struct S;
 
 impl Foo for S {
     fn bar(&self) {}
-    <|>
+<|>
 }",
             "
 trait Foo {
@@ -221,9 +184,8 @@ struct S;
 
 impl Foo for S {
     fn bar(&self) {}
-    fn foo(&self) {
-        unimplemented!()
-    }<|>
+    <|>fn foo(&self) { unimplemented!() }
+
 }",
         );
     }
@@ -240,9 +202,7 @@ impl Foo for S { <|> }",
 trait Foo { fn foo(&self); }
 struct S;
 impl Foo for S {
-    fn foo(&self) {
-        unimplemented!()
-    }<|>
+    <|>fn foo(&self) { unimplemented!() }
 }",
         );
     }
@@ -259,9 +219,7 @@ impl Foo for S {}<|>",
 trait Foo { fn foo(&self); }
 struct S;
 impl Foo for S {
-    fn foo(&self) {
-        unimplemented!()
-    }<|>
+    <|>fn foo(&self) { unimplemented!() }
 }",
         )
     }
@@ -292,35 +250,6 @@ impl Foo for S { <|> }",
     }
 
     #[test]
-    fn test_indented_impl_block() {
-        check_assist(
-            add_missing_impl_members,
-            "
-trait Foo {
-    fn valid(some: u32) -> bool;
-}
-struct S;
-
-mod my_mod {
-    impl crate::Foo for S { <|> }
-}",
-            "
-trait Foo {
-    fn valid(some: u32) -> bool;
-}
-struct S;
-
-mod my_mod {
-    impl crate::Foo for S {
-        fn valid(some: u32) -> bool {
-            unimplemented!()
-        }<|>
-    }
-}",
-        )
-    }
-
-    #[test]
     fn test_with_docstring_and_attrs() {
         check_assist(
             add_missing_impl_members,
@@ -342,9 +271,7 @@ trait Foo {
 }
 struct S;
 impl Foo for S {
-    fn foo(&self) {
-        unimplemented!()
-    }<|>
+    <|>fn foo(&self) { unimplemented!() }
 }"#,
         )
     }
@@ -367,7 +294,7 @@ trait Foo {
 }
 struct S;
 impl Foo for S {
-    fn valid(some: u32) -> bool { false }<|>
+    <|>fn valid(some: u32) -> bool { false }
 }",
         )
     }
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 17a9041c5..e744e82d0 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -165,6 +165,10 @@ impl AssistBuilder {
         self.target = Some(target)
     }
 
+    pub(crate) fn text_edit_builder(&mut self) -> &mut TextEditBuilder {
+        &mut self.edit
+    }
+
     fn build(self) -> AssistAction {
         AssistAction {
             edit: self.edit.finish(),
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
new file mode 100644
index 000000000..6854294ae
--- /dev/null
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -0,0 +1,333 @@
+use std::{iter, ops::RangeInclusive};
+
+use arrayvec::ArrayVec;
+use ra_text_edit::TextEditBuilder;
+use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction};
+use ra_fmt::leading_indent;
+
+pub struct AstEditor<N: AstNode> {
+    original_ast: TreeArc<N>,
+    ast: TreeArc<N>,
+}
+
+impl<N: AstNode> AstEditor<N> {
+    pub fn new(node: &N) -> AstEditor<N> {
+        AstEditor { original_ast: node.to_owned(), ast: node.to_owned() }
+    }
+
+    pub fn into_text_edit(self, builder: &mut TextEditBuilder) {
+        // FIXME: compute a more fine-grained diff here.
+        // If *you* know a nice algorithm to compute diff between two syntax
+        // tree, tell me about it!
+        builder.replace(self.original_ast.syntax().range(), self.ast().syntax().text().to_string());
+    }
+
+    pub fn ast(&self) -> &N {
+        &*self.ast
+    }
+
+    #[must_use]
+    fn insert_children<'a>(
+        &self,
+        position: InsertPosition<SyntaxElement<'_>>,
+        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
+    ) -> TreeArc<N> {
+        let new_syntax = self.ast().syntax().insert_children(position, to_insert);
+        N::cast(&new_syntax).unwrap().to_owned()
+    }
+
+    #[must_use]
+    fn replace_children<'a>(
+        &self,
+        to_delete: RangeInclusive<SyntaxElement<'_>>,
+        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
+    ) -> TreeArc<N> {
+        let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
+        N::cast(&new_syntax).unwrap().to_owned()
+    }
+
+    fn do_make_multiline(&mut self) {
+        let l_curly =
+            match self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) {
+                Some(it) => it,
+                None => return,
+            };
+        let sibling = match l_curly.next_sibling_or_token() {
+            Some(it) => it,
+            None => return,
+        };
+        let existing_ws = match sibling.as_token() {
+            None => None,
+            Some(tok) if tok.kind() != WHITESPACE => None,
+            Some(ws) => {
+                if ws.text().contains('\n') {
+                    return;
+                }
+                Some(ws)
+            }
+        };
+
+        let indent = leading_indent(self.ast().syntax()).unwrap_or("");
+        let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
+        let to_insert = iter::once(ws.ws().into());
+        self.ast = match existing_ws {
+            None => self.insert_children(InsertPosition::After(l_curly), to_insert),
+            Some(ws) => self.replace_children(RangeInclusive::new(ws.into(), ws.into()), to_insert),
+        };
+    }
+}
+
+impl AstEditor<ast::NamedFieldList> {
+    pub fn append_field(&mut self, field: &ast::NamedField) {
+        self.insert_field(InsertPosition::Last, field)
+    }
+
+    pub fn make_multiline(&mut self) {
+        self.do_make_multiline()
+    }
+
+    pub fn insert_field(
+        &mut self,
+        position: InsertPosition<&'_ ast::NamedField>,
+        field: &ast::NamedField,
+    ) {
+        let is_multiline = self.ast().syntax().text().contains('\n');
+        let ws;
+        let space = if is_multiline {
+            ws = tokens::WsBuilder::new(&format!(
+                "\n{}    ",
+                leading_indent(self.ast().syntax()).unwrap_or("")
+            ));
+            ws.ws()
+        } else {
+            tokens::single_space()
+        };
+
+        let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
+        to_insert.push(space.into());
+        to_insert.push(field.syntax().into());
+        to_insert.push(tokens::comma().into());
+
+        macro_rules! after_l_curly {
+            () => {{
+                let anchor = match self.l_curly() {
+                    Some(it) => it,
+                    None => return,
+                };
+                InsertPosition::After(anchor)
+            }};
+        }
+
+        macro_rules! after_field {
+            ($anchor:expr) => {
+                if let Some(comma) = $anchor
+                    .syntax()
+                    .siblings_with_tokens(Direction::Next)
+                    .find(|it| it.kind() == COMMA)
+                {
+                    InsertPosition::After(comma)
+                } else {
+                    to_insert.insert(0, tokens::comma().into());
+                    InsertPosition::After($anchor.syntax().into())
+                }
+            };
+        };
+
+        let position = match position {
+            InsertPosition::First => after_l_curly!(),
+            InsertPosition::Last => {
+                if !is_multiline {
+                    // don't insert comma before curly
+                    to_insert.pop();
+                }
+                match self.ast().fields().last() {
+                    Some(it) => after_field!(it),
+                    None => after_l_curly!(),
+                }
+            }
+            InsertPosition::Before(anchor) => InsertPosition::Before(anchor.syntax().into()),
+            InsertPosition::After(anchor) => after_field!(anchor),
+        };
+
+        self.ast = self.insert_children(position, to_insert.iter().cloned());
+    }
+
+    fn l_curly(&self) -> Option<SyntaxElement> {
+        self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY)
+    }
+}
+
+impl AstEditor<ast::ItemList> {
+    pub fn make_multiline(&mut self) {
+        self.do_make_multiline()
+    }
+
+    pub fn append_functions<'a>(&mut self, fns: impl Iterator<Item = &'a ast::FnDef>) {
+        fns.for_each(|it| self.append_function(it))
+    }
+
+    pub fn append_function(&mut self, fn_def: &ast::FnDef) {
+        let (indent, position) = match self.ast().impl_items().last() {
+            Some(it) => (
+                leading_indent(it.syntax()).unwrap_or("").to_string(),
+                InsertPosition::After(it.syntax().into()),
+            ),
+            None => match self.l_curly() {
+                Some(it) => (
+                    "    ".to_string() + leading_indent(self.ast().syntax()).unwrap_or(""),
+                    InsertPosition::After(it),
+                ),
+                None => return,
+            },
+        };
+        let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
+        let to_insert: ArrayVec<[SyntaxElement; 2]> =
+            [ws.ws().into(), fn_def.syntax().into()].into();
+        self.ast = self.insert_children(position, to_insert.into_iter());
+    }
+
+    fn l_curly(&self) -> Option<SyntaxElement> {
+        self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY)
+    }
+}
+
+impl AstEditor<ast::FnDef> {
+    pub fn set_body(&mut self, body: &ast::Block) {
+        let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
+        let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() {
+            old_body.syntax().into()
+        } else if let Some(semi) = self.ast().semicolon_token() {
+            to_insert.push(tokens::single_space().into());
+            semi.into()
+        } else {
+            to_insert.push(tokens::single_space().into());
+            to_insert.push(body.syntax().into());
+            self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter());
+            return;
+        };
+        to_insert.push(body.syntax().into());
+        let replace_range = RangeInclusive::new(old_body_or_semi, old_body_or_semi);
+        self.ast = self.replace_children(replace_range, to_insert.into_iter())
+    }
+
+    pub fn strip_attrs_and_docs(&mut self) {
+        loop {
+            if let Some(start) = self
+                .ast()
+                .syntax()
+                .children_with_tokens()
+                .find(|it| it.kind() == ATTR || it.kind() == COMMENT)
+            {
+                let end = match start.next_sibling_or_token() {
+                    Some(el) if el.kind() == WHITESPACE => el,
+                    Some(_) | None => start,
+                };
+                self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty());
+            } else {
+                break;
+            }
+        }
+    }
+}
+
+pub struct AstBuilder<N: AstNode> {
+    _phantom: std::marker::PhantomData<N>,
+}
+
+impl AstBuilder<ast::NamedField> {
+    fn from_text(text: &str) -> TreeArc<ast::NamedField> {
+        ast_node_from_file_text(&format!("fn f() {{ S {{ {}, }} }}", text))
+    }
+
+    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> TreeArc<ast::NamedField> {
+        match expr {
+            Some(expr) => Self::from_text(&format!("{}: {}", name.syntax(), expr.syntax())),
+            None => Self::from_text(&name.syntax().to_string()),
+        }
+    }
+}
+
+impl AstBuilder<ast::Block> {
+    fn from_text(text: &str) -> TreeArc<ast::Block> {
+        ast_node_from_file_text(&format!("fn f() {}", text))
+    }
+
+    pub fn single_expr(e: &ast::Expr) -> TreeArc<ast::Block> {
+        Self::from_text(&format!("{{ {} }}", e.syntax()))
+    }
+}
+
+impl AstBuilder<ast::Expr> {
+    fn from_text(text: &str) -> TreeArc<ast::Expr> {
+        ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
+    }
+
+    pub fn unit() -> TreeArc<ast::Expr> {
+        Self::from_text("()")
+    }
+
+    pub fn unimplemented() -> TreeArc<ast::Expr> {
+        Self::from_text("unimplemented!()")
+    }
+}
+
+impl AstBuilder<ast::NameRef> {
+    pub fn new(text: &str) -> TreeArc<ast::NameRef> {
+        ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
+    }
+}
+
+fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
+    let file = SourceFile::parse(text);
+    let res = file.syntax().descendants().find_map(N::cast).unwrap().to_owned();
+    res
+}
+
+mod tokens {
+    use lazy_static::lazy_static;
+    use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*};
+
+    lazy_static! {
+        static ref SOURCE_FILE: TreeArc<SourceFile> = SourceFile::parse(",\n; ;");
+    }
+
+    pub(crate) fn comma() -> SyntaxToken<'static> {
+        SOURCE_FILE
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.as_token())
+            .find(|it| it.kind() == COMMA)
+            .unwrap()
+    }
+
+    pub(crate) fn single_space() -> SyntaxToken<'static> {
+        SOURCE_FILE
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.as_token())
+            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .unwrap()
+    }
+
+    #[allow(unused)]
+    pub(crate) fn single_newline() -> SyntaxToken<'static> {
+        SOURCE_FILE
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.as_token())
+            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .unwrap()
+    }
+
+    pub(crate) struct WsBuilder(TreeArc<SourceFile>);
+
+    impl WsBuilder {
+        pub(crate) fn new(text: &str) -> WsBuilder {
+            WsBuilder(SourceFile::parse(text))
+        }
+        pub(crate) fn ws(&self) -> SyntaxToken<'_> {
+            self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
+        }
+    }
+
+}
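
The new ast_editor.rs module above is the core of this change: an assist clones the node it wants to edit, mutates the copy through typed helpers, and only at the end turns the old-range/new-text pair into a text edit. A minimal sketch (not part of the commit; the helper name and its parameters are illustrative) of how an assist might drive that API, mirroring the calls used in fill_struct_fields.rs below:

// Illustrative only: append `<name>: ()` to a struct literal's field list and
// record the resulting replacement in an existing TextEditBuilder.
use ra_assists::ast_editor::{AstBuilder, AstEditor};
use ra_syntax::ast;
use ra_text_edit::TextEditBuilder;

fn append_unit_field(
    field_list: &ast::NamedFieldList,
    name: &str,
    builder: &mut TextEditBuilder,
) {
    // Edit a copy of the immutable tree...
    let mut editor = AstEditor::new(field_list);
    let field = AstBuilder::<ast::NamedField>::from_pieces(
        &AstBuilder::<ast::NameRef>::new(name),
        Some(&AstBuilder::<ast::Expr>::unit()),
    );
    editor.append_field(&field);
    // ...then emit one "replace old node text with new node text" edit.
    editor.into_text_edit(builder);
}
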
diff --git a/crates/ra_assists/src/fill_struct_fields.rs b/crates/ra_assists/src/fill_struct_fields.rs
index 663b4f669..54b70e17d 100644
--- a/crates/ra_assists/src/fill_struct_fields.rs
+++ b/crates/ra_assists/src/fill_struct_fields.rs
@@ -1,94 +1,55 @@
-use std::fmt::Write;
-
 use hir::{AdtDef, db::HirDatabase};
 
 use ra_syntax::ast::{self, AstNode};
 
-use crate::{AssistCtx, Assist, AssistId};
+use crate::{AssistCtx, Assist, AssistId, ast_editor::{AstEditor, AstBuilder}};
 
 pub(crate) fn fill_struct_fields(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let struct_lit = ctx.node_at_offset::<ast::StructLit>()?;
-    let mut fsf = FillStructFields {
-        ctx: &mut ctx,
-        named_field_list: struct_lit.named_field_list()?,
-        struct_fields: vec![],
-        struct_lit,
-    };
-    fsf.evaluate_struct_def_fields()?;
-    if fsf.struct_lit_and_def_have_the_same_number_of_fields() {
-        return None;
-    }
-    fsf.remove_already_included_fields()?;
-    fsf.add_action()?;
-    ctx.build()
-}
+    let named_field_list = struct_lit.named_field_list()?;
 
-struct FillStructFields<'a, 'b: 'a, DB> {
-    ctx: &'a mut AssistCtx<'b, DB>,
-    named_field_list: &'a ast::NamedFieldList,
-    struct_fields: Vec<(String, String)>,
-    struct_lit: &'a ast::StructLit,
-}
-
-impl<DB> FillStructFields<'_, '_, DB>
-where
-    DB: HirDatabase,
-{
-    fn add_action(&mut self) -> Option<()> {
-        let named_field_list = self.named_field_list;
-        let struct_fields_string = self.struct_fields_string()?;
-        let struct_lit = self.struct_lit;
-        self.ctx.add_action(AssistId("fill_struct_fields"), "fill struct fields", |edit| {
-            edit.target(struct_lit.syntax().range());
-            edit.set_cursor(struct_lit.syntax().range().start());
-            edit.replace_node_and_indent(named_field_list.syntax(), struct_fields_string);
-        });
-        Some(())
-    }
-
-    fn struct_lit_and_def_have_the_same_number_of_fields(&self) -> bool {
-        self.named_field_list.fields().count() == self.struct_fields.len()
-    }
-
-    fn evaluate_struct_def_fields(&mut self) -> Option<()> {
-        let analyzer = hir::SourceAnalyzer::new(
-            self.ctx.db,
-            self.ctx.frange.file_id,
-            self.struct_lit.syntax(),
-            None,
-        );
-        let struct_lit_ty = analyzer.type_of(self.ctx.db, self.struct_lit.into())?;
+    // Collect all fields from struct definition
+    let mut fields = {
+        let analyzer =
+            hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, struct_lit.syntax(), None);
+        let struct_lit_ty = analyzer.type_of(ctx.db, struct_lit.into())?;
         let struct_def = match struct_lit_ty.as_adt() {
             Some((AdtDef::Struct(s), _)) => s,
             _ => return None,
         };
-        self.struct_fields = struct_def
-            .fields(self.ctx.db)
-            .into_iter()
-            .map(|f| (f.name(self.ctx.db).to_string(), "()".into()))
-            .collect();
-        Some(())
-    }
+        struct_def.fields(ctx.db)
+    };
 
-    fn remove_already_included_fields(&mut self) -> Option<()> {
-        for ast_field in self.named_field_list.fields() {
-            let expr = ast_field.expr()?.syntax().text().to_string();
-            let name_from_ast = ast_field.name_ref()?.text().to_string();
-            if let Some(idx) = self.struct_fields.iter().position(|(n, _)| n == &name_from_ast) {
-                self.struct_fields[idx] = (name_from_ast, expr);
-            }
-        }
-        Some(())
+    // Filter out existing fields
+    for ast_field in named_field_list.fields() {
+        let name_from_ast = ast_field.name_ref()?.text().to_string();
+        fields.retain(|field| field.name(ctx.db).to_string() != name_from_ast);
+    }
+    if fields.is_empty() {
+        return None;
     }
 
-    fn struct_fields_string(&mut self) -> Option<String> {
-        let mut buf = String::from("{\n");
-        for (name, expr) in &self.struct_fields {
-            write!(&mut buf, "    {}: {},\n", name, expr).unwrap();
+    let db = ctx.db;
+    ctx.add_action(AssistId("fill_struct_fields"), "fill struct fields", |edit| {
+        let mut ast_editor = AstEditor::new(named_field_list);
+        if named_field_list.fields().count() == 0 && fields.len() > 2 {
+            ast_editor.make_multiline();
+        };
+
+        for field in fields {
+            let field = AstBuilder::<ast::NamedField>::from_pieces(
+                &AstBuilder::<ast::NameRef>::new(&field.name(db).to_string()),
+                Some(&AstBuilder::<ast::Expr>::unit()),
+            );
+            ast_editor.append_field(&field);
         }
-        buf.push_str("}");
-        Some(buf)
-    }
+
+        edit.target(struct_lit.syntax().range());
+        edit.set_cursor(struct_lit.syntax().range().start());
+
+        ast_editor.into_text_edit(edit.text_edit_builder());
+    });
+    ctx.build()
 }
 
 #[cfg(test)]
@@ -225,14 +186,41 @@ mod tests {
 
             fn main() {
                 let s = <|>S {
+                    c: (1, 2),
+                    e: "foo",
                     a: (),
                     b: (),
-                    c: (1, 2),
                     d: (),
-                    e: "foo",
                 }
             }
             "#,
         );
     }
+
+    #[test]
+    fn fill_struct_short() {
+        check_assist(
+            fill_struct_fields,
+            r#"
+            struct S {
+                foo: u32,
+                bar: String,
+            }
+
+            fn main() {
+                let s = S {<|> };
+            }
+            "#,
+            r#"
+            struct S {
+                foo: u32,
+                bar: String,
+            }
+
+            fn main() {
+                let s = <|>S { foo: (), bar: () };
+            }
+            "#,
+        );
+    }
 }
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index 173c004cd..4c330c907 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -7,6 +7,7 @@
 
 mod assist_ctx;
 mod marks;
+pub mod ast_editor;
 
 use itertools::Itertools;
 
@@ -36,7 +37,7 @@ pub struct AssistAction {
     pub target: Option<TextRange>,
 }
 
-/// Return all the assists applicable at the given position.
+/// Return all the assists eapplicable at the given position.
 ///
 /// Assists are returned in the "unresolved" state, that is only labels are
 /// returned, without actual edits.
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index e771a311c..c7849c995 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -94,6 +94,13 @@ fn parse_macro(
 
     let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
     let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
+
+    // Set a hard limit for the expanded tt
+    let count = tt.count();
+    if count > 65536 {
+        return Err(format!("Total tokens count exceed limit : count = {}", count));
+    }
+
     Ok(mbe::token_tree_to_ast_item_list(&tt))
 }
 
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs
index 39152360c..fbfff4fd7 100644
--- a/crates/ra_hir/src/nameres.rs
+++ b/crates/ra_hir/src/nameres.rs
@@ -55,7 +55,7 @@ mod tests;
 
 use std::sync::Arc;
 
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 use ra_arena::{Arena, RawId, impl_arena_id};
 use ra_db::{FileId, Edition};
 use test_utils::tested_by;
@@ -91,6 +91,19 @@ pub struct CrateDefMap {
     root: CrateModuleId,
     modules: Arena<CrateModuleId, ModuleData>,
     public_macros: FxHashMap<Name, MacroDefId>,
+
+    /// Some macros are not well-behavior, which leads to infinite loop
+    /// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
+    /// We mark it down and skip it in collector
+    ///
+    /// FIXME:
+    /// Right now it only handle a poison macro in a single crate,
+    /// such that if other crate try to call that macro,
+    /// the whole process will do again until it became poisoned in that crate.
+    /// We should handle this macro set globally
+    /// However, do we want to put it as a global variable?
+    poison_macros: FxHashSet<MacroDefId>,
+
     diagnostics: Vec<DefDiagnostic>,
 }
 
@@ -195,6 +208,7 @@ impl CrateDefMap {
                 root,
                 modules,
                 public_macros: FxHashMap::default(),
+                poison_macros: FxHashSet::default(),
                 diagnostics: Vec::new(),
             }
         };
diff --git a/crates/ra_hir/src/nameres/collector.rs b/crates/ra_hir/src/nameres/collector.rs
index 6147b3219..4590a5184 100644
--- a/crates/ra_hir/src/nameres/collector.rs
+++ b/crates/ra_hir/src/nameres/collector.rs
@@ -42,12 +42,40 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
         unresolved_imports: Vec::new(),
         unexpanded_macros: Vec::new(),
         global_macro_scope: FxHashMap::default(),
-        marco_stack_count: 0,
+        macro_stack_monitor: MacroStackMonitor::default(),
     };
     collector.collect();
     collector.finish()
 }
 
+#[derive(Default)]
+struct MacroStackMonitor {
+    counts: FxHashMap<MacroDefId, u32>,
+
+    /// Mainly use for test
+    validator: Option<Box<dyn Fn(u32) -> bool>>,
+}
+
+impl MacroStackMonitor {
+    fn increase(&mut self, macro_def_id: MacroDefId) {
+        *self.counts.entry(macro_def_id).or_default() += 1;
+    }
+
+    fn decrease(&mut self, macro_def_id: MacroDefId) {
+        *self.counts.entry(macro_def_id).or_default() -= 1;
+    }
+
+    fn is_poison(&self, macro_def_id: MacroDefId) -> bool {
+        let cur = *self.counts.get(&macro_def_id).unwrap_or(&0);
+
+        if let Some(validator) = &self.validator {
+            validator(cur)
+        } else {
+            cur > 100
+        }
+    }
+}
+
 /// Walks the tree of module recursively
 struct DefCollector<DB> {
     db: DB,
@@ -59,7 +87,7 @@ struct DefCollector<DB> {
 
     /// Some macro use `$tt:tt which mean we have to handle the macro perfectly
     /// To prevent stackoverflow, we add a deep counter here for prevent that.
-    marco_stack_count: u32,
+    macro_stack_monitor: MacroStackMonitor,
 }
 
 impl<'a, DB> DefCollector<&'a DB>
@@ -317,30 +345,40 @@ where
             let def_map = self.db.crate_def_map(krate);
             if let Some(macro_id) = def_map.public_macros.get(&path.segments[1].name).cloned() {
                 let call_id = MacroCallLoc { def: macro_id, ast_id: *ast_id }.id(self.db);
-                resolved.push((*module_id, call_id));
+                resolved.push((*module_id, call_id, macro_id));
             }
             false
         });
 
-        for (module_id, macro_call_id) in resolved {
-            self.collect_macro_expansion(module_id, macro_call_id);
+        for (module_id, macro_call_id, macro_def_id) in resolved {
+            self.collect_macro_expansion(module_id, macro_call_id, macro_def_id);
         }
         res
     }
 
-    fn collect_macro_expansion(&mut self, module_id: CrateModuleId, macro_call_id: MacroCallId) {
-        self.marco_stack_count += 1;
+    fn collect_macro_expansion(
+        &mut self,
+        module_id: CrateModuleId,
+        macro_call_id: MacroCallId,
+        macro_def_id: MacroDefId,
+    ) {
+        if self.def_map.poison_macros.contains(&macro_def_id) {
+            return;
+        }
+
+        self.macro_stack_monitor.increase(macro_def_id);
 
-        if self.marco_stack_count < 300 {
+        if !self.macro_stack_monitor.is_poison(macro_def_id) {
             let file_id: HirFileId = macro_call_id.into();
             let raw_items = self.db.raw_items(file_id);
             ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
-                .collect(raw_items.items())
+                .collect(raw_items.items());
         } else {
             log::error!("Too deep macro expansion: {}", macro_call_id.debug_dump(self.db));
+            self.def_map.poison_macros.insert(macro_def_id);
         }
 
-        self.marco_stack_count -= 1;
+        self.macro_stack_monitor.decrease(macro_def_id);
     }
 
     fn finish(self) -> CrateDefMap {
@@ -484,7 +522,7 @@ where
         {
             let macro_call_id = MacroCallLoc { def: macro_id, ast_id }.id(self.def_collector.db);
 
-            self.def_collector.collect_macro_expansion(self.module_id, macro_call_id);
+            self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, macro_id);
             return;
         }
 
@@ -530,3 +568,108 @@ fn resolve_submodule(
         None => Err(if is_dir_owner { file_mod } else { file_dir_mod }),
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use ra_db::SourceDatabase;
+
+    use crate::{Crate, mock::MockDatabase, DefDatabase};
+    use ra_arena::{Arena};
+    use super::*;
+    use rustc_hash::FxHashSet;
+
+    fn do_collect_defs(
+        db: &impl DefDatabase,
+        def_map: CrateDefMap,
+        monitor: MacroStackMonitor,
+    ) -> CrateDefMap {
+        let mut collector = DefCollector {
+            db,
+            def_map,
+            glob_imports: FxHashMap::default(),
+            unresolved_imports: Vec::new(),
+            unexpanded_macros: Vec::new(),
+            global_macro_scope: FxHashMap::default(),
+            macro_stack_monitor: monitor,
+        };
+        collector.collect();
+        collector.finish()
+    }
+
+    fn do_limited_resolve(code: &str, limit: u32, poison_limit: u32) -> CrateDefMap {
+        let (db, _source_root, _) = MockDatabase::with_single_file(&code);
+        let crate_id = db.crate_graph().iter().next().unwrap();
+        let krate = Crate { crate_id };
+
+        let def_map = {
+            let edition = krate.edition(&db);
+            let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default();
+            let root = modules.alloc(ModuleData::default());
+            CrateDefMap {
+                krate,
+                edition,
+                extern_prelude: FxHashMap::default(),
+                prelude: None,
+                root,
+                modules,
+                public_macros: FxHashMap::default(),
+                poison_macros: FxHashSet::default(),
+                diagnostics: Vec::new(),
+            }
+        };
+
+        let mut monitor = MacroStackMonitor::default();
+        monitor.validator = Some(Box::new(move |count| {
+            assert!(count < limit);
+            count >= poison_limit
+        }));
+
+        do_collect_defs(&db, def_map, monitor)
+    }
+
+    #[test]
+    fn test_macro_expand_limit_width() {
+        do_limited_resolve(
+            r#"
+        macro_rules! foo {
+            ($($ty:ty)*) => { foo!($($ty)*, $($ty)*); }
+}
+foo!(KABOOM);
+        "#,
+            16,
+            1000,
+        );
+    }
+
+    #[test]
+    fn test_macro_expand_poisoned() {
+        let def = do_limited_resolve(
+            r#"
+        macro_rules! foo {
+            ($ty:ty) => { foo!($ty); }
+}
+foo!(KABOOM);
+        "#,
+            100,
+            16,
+        );
+
+        assert_eq!(def.poison_macros.len(), 1);
+    }
+
+    #[test]
+    fn test_macro_expand_normal() {
+        let def = do_limited_resolve(
+            r#"
+        macro_rules! foo {
+            ($ident:ident) => { struct $ident {} }
+}
+foo!(Bar);
+        "#,
+            16,
+            16,
+        );
+
+        assert_eq!(def.poison_macros.len(), 0);
+    }
+}
diff --git a/crates/ra_ide_api/src/completion/complete_postfix.rs b/crates/ra_ide_api/src/completion/complete_postfix.rs
index 4dfa5f176..278b1b797 100644
--- a/crates/ra_ide_api/src/completion/complete_postfix.rs
+++ b/crates/ra_ide_api/src/completion/complete_postfix.rs
@@ -40,7 +40,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         ctx,
         "match",
         "match expr {}",
-        &format!("match {} {{\n${{1:_}} => {{$0\\}},\n}}", receiver_text),
+        &format!("match {} {{\n    ${{1:_}} => {{$0\\}},\n}}", receiver_text),
     )
     .add_to(acc);
     postfix_snippet(
diff --git a/crates/ra_ide_api/src/completion/snapshots/completion_item__postfix_completion_works_for_trivial_path_expression.snap b/crates/ra_ide_api/src/completion/snapshots/completion_item__postfix_completion_works_for_trivial_path_expression.snap
index 2b5435f0c..3bbc9e3c4 100644
--- a/crates/ra_ide_api/src/completion/snapshots/completion_item__postfix_completion_works_for_trivial_path_expression.snap
+++ b/crates/ra_ide_api/src/completion/snapshots/completion_item__postfix_completion_works_for_trivial_path_expression.snap
@@ -1,6 +1,6 @@
 ---
-created: "2019-02-18T09:22:24.127119709Z"
-creator: insta@0.6.2
+created: "2019-04-22T07:37:13.981826301Z"
+creator: insta@0.7.4
 source: crates/ra_ide_api/src/completion/completion_item.rs
 expression: kind_completions
 ---
@@ -23,7 +23,7 @@ expression: kind_completions
         label: "match",
         source_range: [76; 76),
         delete: [72; 76),
-        insert: "match bar {\n${1:_} => {$0\\},\n}",
+        insert: "match bar {\n    ${1:_} => {$0\\},\n}",
         detail: "match expr {}"
     },
     CompletionItem {
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index e78bc734b..eedc0c5dd 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -199,7 +199,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         rules.expand(&invocation_tt).unwrap()
     }
 
-    pub(crate) fn expand_to_syntax(
+    pub(crate) fn expand_to_items(
         rules: &MacroRules,
         invocation: &str,
     ) -> ra_syntax::TreeArc<ast::MacroItems> {
@@ -207,7 +207,28 @@ impl_froms!(TokenTree: Leaf, Subtree);
         token_tree_to_macro_items(&expanded).unwrap()
     }
 
-    pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+    #[allow(unused)]
+    pub(crate) fn expand_to_stmts(
+        rules: &MacroRules,
+        invocation: &str,
+    ) -> ra_syntax::TreeArc<ast::MacroStmts> {
+        let expanded = expand(rules, invocation);
+        token_tree_to_macro_stmts(&expanded).unwrap()
+    }
+
+    pub(crate) fn expand_to_expr(
+        rules: &MacroRules,
+        invocation: &str,
+    ) -> ra_syntax::TreeArc<ast::Expr> {
+        let expanded = expand(rules, invocation);
+        token_tree_to_expr(&expanded).unwrap()
+    }
+
+    pub(crate) fn assert_expansion(
+        rules: &MacroRules,
+        invocation: &str,
+        expansion: &str,
+    ) -> tt::Subtree {
         let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
 
@@ -224,6 +245,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
 
         let file = file.replace("C_C__C", "$crate");
         assert_eq!(tree, file,);
+
+        expanded
     }
 
     #[test]
@@ -638,7 +661,7 @@ MACRO_ITEMS@[0; 40)
         );
 
         assert_eq!(
-            expand_to_syntax(&rules, "foo! { 1 + 1 }").syntax().debug_dump().trim(),
+            expand_to_items(&rules, "foo! { 1 + 1 }").syntax().debug_dump().trim(),
             r#"MACRO_ITEMS@[0; 15)
   FN_DEF@[0; 15)
     FN_KW@[0; 2) "fn"
@@ -914,6 +937,78 @@ MACRO_ITEMS@[0; 40)
 "#,
         );
         assert_expansion(&rules, r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#);
+        assert_expansion(
+            &rules,
+            r#"vec![1u32,2]"#,
+            r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#,
+        );
+
+        assert_eq!(
+            expand_to_expr(&rules, r#"vec![1u32,2]"#).syntax().debug_dump().trim(),
+            r#"BLOCK_EXPR@[0; 45)
+  BLOCK@[0; 45)
+    L_CURLY@[0; 1) "{"
+    LET_STMT@[1; 20)
+      LET_KW@[1; 4) "let"
+      BIND_PAT@[4; 8)
+        MUT_KW@[4; 7) "mut"
+        NAME@[7; 8)
+          IDENT@[7; 8) "v"
+      EQ@[8; 9) "="
+      CALL_EXPR@[9; 19)
+        PATH_EXPR@[9; 17)
+          PATH@[9; 17)
+            PATH@[9; 12)
+              PATH_SEGMENT@[9; 12)
+                NAME_REF@[9; 12)
+                  IDENT@[9; 12) "Vec"
+            COLONCOLON@[12; 14) "::"
+            PATH_SEGMENT@[14; 17)
+              NAME_REF@[14; 17)
+                IDENT@[14; 17) "new"
+        ARG_LIST@[17; 19)
+          L_PAREN@[17; 18) "("
+          R_PAREN@[18; 19) ")"
+      SEMI@[19; 20) ";"
+    EXPR_STMT@[20; 33)
+      METHOD_CALL_EXPR@[20; 32)
+        PATH_EXPR@[20; 21)
+          PATH@[20; 21)
+            PATH_SEGMENT@[20; 21)
+              NAME_REF@[20; 21)
+                IDENT@[20; 21) "v"
+        DOT@[21; 22) "."
+        NAME_REF@[22; 26)
+          IDENT@[22; 26) "push"
+        ARG_LIST@[26; 32)
+          L_PAREN@[26; 27) "("
+          LITERAL@[27; 31)
+            INT_NUMBER@[27; 31) "1u32"
+          R_PAREN@[31; 32) ")"
+      SEMI@[32; 33) ";"
+    EXPR_STMT@[33; 43)
+      METHOD_CALL_EXPR@[33; 42)
+        PATH_EXPR@[33; 34)
+          PATH@[33; 34)
+            PATH_SEGMENT@[33; 34)
+              NAME_REF@[33; 34)
+                IDENT@[33; 34) "v"
+        DOT@[34; 35) "."
+        NAME_REF@[35; 39)
+          IDENT@[35; 39) "push"
+        ARG_LIST@[39; 42)
+          L_PAREN@[39; 40) "("
+          LITERAL@[40; 41)
+            INT_NUMBER@[40; 41) "2"
+          R_PAREN@[41; 42) ")"
+      SEMI@[42; 43) ";"
+    PATH_EXPR@[43; 44)
+      PATH@[43; 44)
+        PATH_SEGMENT@[43; 44)
+          NAME_REF@[43; 44)
+            IDENT@[43; 44) "v"
+    R_CURLY@[44; 45) "}""#
+        );
     }
 
     #[test]
diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs
index 528aa0f8a..f07107414 100644
--- a/crates/ra_mbe/src/subtree_parser.rs
+++ b/crates/ra_mbe/src/subtree_parser.rs
@@ -5,6 +5,7 @@ use ra_syntax::{SyntaxKind};
 
 struct OffsetTokenSink {
     token_pos: usize,
+    error: bool,
 }
 
 impl TreeSink for OffsetTokenSink {
@@ -13,7 +14,9 @@ impl TreeSink for OffsetTokenSink {
     }
     fn start_node(&mut self, _kind: SyntaxKind) {}
     fn finish_node(&mut self) {}
-    fn error(&mut self, _error: ra_parser::ParseError) {}
+    fn error(&mut self, _error: ra_parser::ParseError) {
+        self.error = true;
+    }
 }
 
 pub(crate) struct Parser<'a> {
@@ -67,11 +70,15 @@ impl<'a> Parser<'a> {
         F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
     {
         let mut src = SubtreeTokenSource::new(&self.subtree.token_trees[*self.cur_pos..]);
-        let mut sink = OffsetTokenSink { token_pos: 0 };
+        let mut sink = OffsetTokenSink { token_pos: 0, error: false };
 
         f(&src, &mut sink);
 
-        self.finish(sink.token_pos, &mut src)
+        let r = self.finish(sink.token_pos, &mut src);
+        if sink.error {
+            return None;
+        }
+        r
     }
 
     fn finish(self, parsed_token: usize, src: &mut SubtreeTokenSource) -> Option<tt::TokenTree> {
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index 5c4c0ffc1..9cbd2c6b8 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -210,6 +210,15 @@ impl ast::EnumVariant {
     }
 }
 
+impl ast::FnDef {
+    pub fn semicolon_token(&self) -> Option<SyntaxToken<'_>> {
+        self.syntax()
+            .last_child_or_token()
+            .and_then(|it| it.as_token())
+            .filter(|it| it.kind() == SEMI)
+    }
+}
+
 impl ast::LetStmt {
     pub fn has_semi(&self) -> bool {
         match self.syntax().last_child_or_token() {
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index a6ce14f06..9cb66b76b 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -38,7 +38,7 @@ pub use crate::{
     ast::AstNode,
     syntax_error::{SyntaxError, SyntaxErrorKind, Location},
     syntax_text::SyntaxText,
-    syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken},
+    syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken, InsertPosition},
     ptr::{SyntaxNodePtr, AstPtr},
     parsing::{tokenize, classify_literal, Token},
 };
diff --git a/crates/ra_syntax/src/ptr.rs b/crates/ra_syntax/src/ptr.rs
index 15a8b94cd..b0816b135 100644
--- a/crates/ra_syntax/src/ptr.rs
+++ b/crates/ra_syntax/src/ptr.rs
@@ -10,7 +10,7 @@ use crate::{
 /// specific node across reparses of the same file.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct SyntaxNodePtr {
-    range: TextRange,
+    pub(crate) range: TextRange,
     kind: SyntaxKind,
 }
 
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs
index dc2352c76..92c15234e 100644
--- a/crates/ra_syntax/src/syntax_node.rs
+++ b/crates/ra_syntax/src/syntax_node.rs
@@ -7,6 +7,7 @@
 //! modules just wraps its API.
 
 use std::{
+    ops::RangeInclusive,
     fmt::{self, Write},
     any::Any,
     borrow::Borrow,
@@ -17,13 +18,21 @@ use ra_parser::ParseError;
 use rowan::{TransparentNewType, GreenNodeBuilder};
 
 use crate::{
-    SmolStr, SyntaxKind, TextUnit, TextRange, SyntaxText, SourceFile, AstNode,
+    SmolStr, SyntaxKind, TextUnit, TextRange, SyntaxText, SourceFile, AstNode, SyntaxNodePtr,
     syntax_error::{SyntaxError, SyntaxErrorKind},
 };
 
 pub use rowan::WalkEvent;
 pub(crate) use rowan::{GreenNode, GreenToken};
 
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum InsertPosition<T> {
+    First,
+    Last,
+    Before(T),
+    After(T),
+}
+
 /// Marker trait for CST and AST nodes
 pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode> {}
 impl<T: TransparentNewType<Repr = rowan::SyntaxNode>> SyntaxNodeWrapper for T {}
@@ -309,6 +318,97 @@ impl SyntaxNode {
     pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
         self.0.replace_with(replacement)
     }
+
+    /// Adds specified children (tokens or nodes) to the current node at the
+    /// specific position.
+    ///
+    /// This is a type-unsafe low-level editing API, if you need to use it,
+    /// prefer to create a type-safe abstraction on top of it instead.
+    pub fn insert_children<'a>(
+        &self,
+        position: InsertPosition<SyntaxElement<'_>>,
+        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
+    ) -> TreeArc<SyntaxNode> {
+        let mut delta = TextUnit::default();
+        let to_insert = to_insert.map(|element| {
+            delta += element.text_len();
+            to_green_element(element)
+        });
+
+        let old_children = self.0.green().children();
+
+        let new_children = match position {
+            InsertPosition::First => {
+                to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
+            }
+            InsertPosition::Last => {
+                old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>()
+            }
+            InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
+                let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
+                let split_at = self.position_of_child(anchor) + take_anchor;
+                let (before, after) = old_children.split_at(split_at);
+                before
+                    .iter()
+                    .cloned()
+                    .chain(to_insert)
+                    .chain(after.iter().cloned())
+                    .collect::<Box<[_]>>()
+            }
+        };
+
+        self.with_children(new_children)
+    }
+
+    /// Replaces all nodes in `to_delete` with nodes from `to_insert`
+    ///
+    /// This is a type-unsafe low-level editing API, if you need to use it,
+    /// prefer to create a type-safe abstraction on top of it instead.
+    pub fn replace_children<'a>(
+        &self,
+        to_delete: RangeInclusive<SyntaxElement<'_>>,
+        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
+    ) -> TreeArc<SyntaxNode> {
+        let start = self.position_of_child(*to_delete.start());
+        let end = self.position_of_child(*to_delete.end());
+        let old_children = self.0.green().children();
+
+        let new_children = old_children[..start]
+            .iter()
+            .cloned()
+            .chain(to_insert.map(to_green_element))
+            .chain(old_children[end + 1..].iter().cloned())
+            .collect::<Box<[_]>>();
+        self.with_children(new_children)
+    }
+
+    fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> TreeArc<SyntaxNode> {
+        let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
+        let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
+        let new_file_node = self.replace_with(new_node);
+        let file = SourceFile::new(new_file_node, Vec::new());
+
+        // FIXME: use a more elegant way to re-fetch the node (#1185), make
+        // `range` private afterwards
+        let mut ptr = SyntaxNodePtr::new(self);
+        ptr.range = TextRange::offset_len(ptr.range().start(), len);
+        return ptr.to_node(&file).to_owned();
+    }
+
+    fn position_of_child(&self, child: SyntaxElement) -> usize {
+        self.children_with_tokens()
+            .position(|it| it == child)
+            .expect("elemetn is not a child of current element")
+    }
+}
+
+fn to_green_element(element: SyntaxElement) -> rowan::GreenElement {
+    match element {
+        SyntaxElement::Node(node) => node.0.green().clone().into(),
+        SyntaxElement::Token(tok) => {
+            GreenToken::new(rowan::SyntaxKind(tok.kind() as u16), tok.text().clone()).into()
+        }
+    }
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
@@ -451,6 +551,13 @@ impl<'a> SyntaxElement<'a> {
         }
         .ancestors()
     }
+
+    fn text_len(&self) -> TextUnit {
+        match self {
+            SyntaxElement::Node(node) => node.0.green().text_len(),
+            SyntaxElement::Token(token) => TextUnit::of_str(token.0.text()),
+        }
+    }
 }
 
 impl<'a> From<rowan::SyntaxElement<'a>> for SyntaxElement<'a> {
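
The syntax_node.rs additions above expose insert_children and replace_children as the low-level, type-unsafe editing primitives that AstEditor builds on: they splice green-tree children and re-fetch the edited node through a SyntaxNodePtr whose range is patched to the new length. A minimal sketch (not part of the commit) of calling insert_children directly; the helper function and its ws parameter are illustrative, and a real caller would obtain the whitespace token the way the tokens module in ast_editor.rs does:

// Illustrative only: insert a pre-made whitespace token right after the `{`
// of some node, producing a new immutable tree.
use std::iter;
use ra_syntax::{InsertPosition, SyntaxElement, SyntaxKind::L_CURLY, SyntaxNode, TreeArc};

fn add_ws_after_l_curly<'a>(
    node: &SyntaxNode,
    ws: SyntaxElement<'a>,
) -> Option<TreeArc<SyntaxNode>> {
    // Find the `{` token among the node's direct children...
    let l_curly = node.children_with_tokens().find(|it| it.kind() == L_CURLY)?;
    // ...and splice the whitespace element in right after it.
    Some(node.insert_children(InsertPosition::After(l_curly), iter::once(ws)))
}
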
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 0b0b9b4d2..62c5ac52a 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -149,3 +149,19 @@ impl fmt::Display for Punct {
         fmt::Display::fmt(&self.char, f)
     }
 }
+
+impl Subtree {
+    /// Count the number of tokens recursively
+    pub fn count(&self) -> usize {
+        let children_count = self
+            .token_trees
+            .iter()
+            .map(|c| match c {
+                TokenTree::Subtree(c) => c.count(),
+                _ => 0,
+            })
+            .sum::<usize>();
+
+        self.token_trees.len() + children_count
+    }
+}