 crates/hir_def/src/body/lower.rs                 | 68
 crates/hir_def/src/expr.rs                       |  4
 crates/hir_def/src/item_tree.rs                  |  9
 crates/hir_def/src/item_tree/lower.rs            |  8
 crates/hir_expand/src/db.rs                      | 59
 crates/hir_ty/src/infer/expr.rs                  |  1
 crates/hir_ty/src/tests/macros.rs                | 40
 crates/ide_assists/src/handlers/generate_impl.rs | 17
 crates/ide_assists/src/utils.rs                  |  9
 crates/ide_db/src/call_info/tests.rs             |  3
 crates/ide_db/src/helpers/insert_use.rs          |  3
 crates/ide_db/src/label.rs                       |  2
 crates/ide_db/src/search.rs                      |  3
 crates/ide_db/src/source_change.rs               |  2
 crates/ide_db/src/traits.rs                      |  3
 crates/ide_db/src/traits/tests.rs                |  3
 crates/ide_db/src/ty_filter.rs                   |  6
 crates/syntax/src/ast/node_ext.rs                | 17
 crates/syntax/src/lib.rs                         |  2
 crates/syntax/src/token_text.rs                  | 77
 docs/user/manual.adoc                            |  9
 21 files changed, 253 insertions(+), 92 deletions(-)
 create mode 100644 crates/syntax/src/token_text.rs
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index 19f5065d1..229e81dd4 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -74,6 +74,7 @@ pub(super) fn lower(
             _c: Count::new(),
         },
         expander,
+        statements_in_scope: Vec::new(),
     }
     .collect(params, body)
 }
@@ -83,6 +84,7 @@ struct ExprCollector<'a> {
     expander: Expander,
     body: Body,
     source_map: BodySourceMap,
+    statements_in_scope: Vec<Statement>,
 }
 
 impl ExprCollector<'_> {
@@ -533,15 +535,13 @@ impl ExprCollector<'_> {
                 ids[0]
             }
            ast::Expr::MacroStmts(e) => {
-                // FIXME: these statements should be held by some hir containter
-                for stmt in e.statements() {
-                    self.collect_stmt(stmt);
-                }
-                if let Some(expr) = e.expr() {
-                    self.collect_expr(expr)
-                } else {
-                    self.alloc_expr(Expr::Missing, syntax_ptr)
-                }
+                e.statements().for_each(|s| self.collect_stmt(s));
+                let tail = e
+                    .expr()
+                    .map(|e| self.collect_expr(e))
+                    .unwrap_or_else(|| self.alloc_expr(Expr::Missing, syntax_ptr.clone()));
+
+                self.alloc_expr(Expr::MacroStmts { tail }, syntax_ptr)
             }
         })
     }
@@ -618,58 +618,54 @@ impl ExprCollector<'_> {
         }
     }
 
-    fn collect_stmt(&mut self, s: ast::Stmt) -> Option<Vec<Statement>> {
-        let stmt = match s {
+    fn collect_stmt(&mut self, s: ast::Stmt) {
+        match s {
             ast::Stmt::LetStmt(stmt) => {
-                self.check_cfg(&stmt)?;
-
+                if self.check_cfg(&stmt).is_none() {
+                    return;
+                }
                 let pat = self.collect_pat_opt(stmt.pat());
                 let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it));
                 let initializer = stmt.initializer().map(|e| self.collect_expr(e));
-                vec![Statement::Let { pat, type_ref, initializer }]
+                self.statements_in_scope.push(Statement::Let { pat, type_ref, initializer });
             }
             ast::Stmt::ExprStmt(stmt) => {
-                self.check_cfg(&stmt)?;
+                if self.check_cfg(&stmt).is_none() {
+                    return;
+                }
 
                 // Note that macro could be expended to multiple statements
                 if let Some(ast::Expr::MacroCall(m)) = stmt.expr() {
                     let syntax_ptr = AstPtr::new(&stmt.expr().unwrap());
-                    let mut stmts = vec![];
 
                     self.collect_macro_call(m, syntax_ptr.clone(), false, |this, expansion| {
                         match expansion {
                             Some(expansion) => {
                                 let statements: ast::MacroStmts = expansion;
 
-                                statements.statements().for_each(|stmt| {
-                                    if let Some(mut r) = this.collect_stmt(stmt) {
-                                        stmts.append(&mut r);
-                                    }
-                                });
+                                statements.statements().for_each(|stmt| this.collect_stmt(stmt));
                                 if let Some(expr) = statements.expr() {
-                                    stmts.push(Statement::Expr(this.collect_expr(expr)));
+                                    let expr = this.collect_expr(expr);
+                                    this.statements_in_scope.push(Statement::Expr(expr));
                                 }
                             }
                             None => {
-                                stmts.push(Statement::Expr(
-                                    this.alloc_expr(Expr::Missing, syntax_ptr.clone()),
-                                ));
+                                let expr = this.alloc_expr(Expr::Missing, syntax_ptr.clone());
+                                this.statements_in_scope.push(Statement::Expr(expr));
                             }
                         }
                     });
-                    stmts
                 } else {
-                    vec![Statement::Expr(self.collect_expr_opt(stmt.expr()))]
+                    let expr = self.collect_expr_opt(stmt.expr());
+                    self.statements_in_scope.push(Statement::Expr(expr));
                 }
             }
             ast::Stmt::Item(item) => {
-                self.check_cfg(&item)?;
-
-                return None;
+                if self.check_cfg(&item).is_none() {
+                    return;
+                }
             }
-        };
-
-        Some(stmt)
+        }
     }
 
     fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
@@ -685,10 +681,12 @@ impl ExprCollector<'_> {
         let module = if has_def_map { def_map.root() } else { self.expander.module };
         let prev_def_map = mem::replace(&mut self.expander.def_map, def_map);
         let prev_local_module = mem::replace(&mut self.expander.module, module);
+        let prev_statements = std::mem::take(&mut self.statements_in_scope);
+
+        block.statements().for_each(|s| self.collect_stmt(s));
 
-        let statements =
-            block.statements().filter_map(|s| self.collect_stmt(s)).flatten().collect();
         let tail = block.tail_expr().map(|e| self.collect_expr(e));
+        let statements = std::mem::replace(&mut self.statements_in_scope, prev_statements);
         let syntax_node_ptr = AstPtr::new(&block.into());
         let expr_id = self.alloc_expr(
             Expr::Block { id: block_id, statements, tail, label: None },
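Context for the change above: the input it targets is a statement-position macro whose expansion contains whole statements, which the collector now pushes into `statements_in_scope` instead of threading a `Vec<Statement>` through every call. A minimal illustration, mirroring the `id!`-style fixtures used by the tests later in this diff (the macro below is an assumed example, not code from the patch):

    // The caller's tokens are forwarded unchanged, so the `let` binding stays
    // visible after the call; the expansion is a whole statement, which the
    // collector now appends to the enclosing block's statement list.
    macro_rules! id {
        ($($t:tt)*) => { $($t)* };
    }

    fn main() {
        id! {
            let a = 1;
        }
        let b = a + 1; // `a` resolves; inference sees the expanded `let`
        assert_eq!(b, 2);
    }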
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs
index 24be93773..6c7376fad 100644
--- a/crates/hir_def/src/expr.rs
+++ b/crates/hir_def/src/expr.rs
@@ -171,6 +171,9 @@ pub enum Expr {
     Unsafe {
         body: ExprId,
     },
+    MacroStmts {
+        tail: ExprId,
+    },
     Array(Array),
     Literal(Literal),
 }
@@ -357,6 +360,7 @@ impl Expr {
                     f(*repeat)
                 }
             },
+            Expr::MacroStmts { tail } => f(*tail),
             Expr::Literal(_) => {}
         }
     }
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index ae2475b4e..ca0048b16 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -110,15 +110,6 @@ impl ItemTree {
                     // still need to collect inner items.
                     ctx.lower_inner_items(e.syntax())
                 },
-                ast::ExprStmt(stmt) => {
-                    // Macros can expand to stmt. We return an empty item tree in this case, but
-                    // still need to collect inner items.
-                    ctx.lower_inner_items(stmt.syntax())
-                },
-                ast::Item(item) => {
-                    // Macros can expand to stmt and other item, and we add it as top level item
-                    ctx.lower_single_item(item)
-                },
                 _ => {
                     panic!("cannot create item tree from {:?} {}", syntax, syntax);
                 },
diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs
index d3fe1ce1e..3f558edd8 100644
--- a/crates/hir_def/src/item_tree/lower.rs
+++ b/crates/hir_def/src/item_tree/lower.rs
@@ -87,14 +87,6 @@ impl Ctx {
         self.tree
     }
 
-    pub(super) fn lower_single_item(mut self, item: ast::Item) -> ItemTree {
-        self.tree.top_level = self
-            .lower_mod_item(&item, false)
-            .map(|item| item.0)
-            .unwrap_or_else(|| Default::default());
-        self.tree
-    }
-
     pub(super) fn lower_inner_items(mut self, within: &SyntaxNode) -> ItemTree {
         self.collect_inner_items(within);
         self.tree
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index fc73e435b..d672f6723 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -5,7 +5,13 @@ use std::sync::Arc;
 use base_db::{salsa, SourceDatabase};
 use mbe::{ExpandError, ExpandResult, MacroRules};
 use parser::FragmentKind;
-use syntax::{algo::diff, ast::NameOwner, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
+use syntax::{
+    algo::diff,
+    ast::{MacroStmts, NameOwner},
+    AstNode, GreenNode, Parse,
+    SyntaxKind::*,
+    SyntaxNode,
+};
 
 use crate::{
     ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinDeriveExpander, BuiltinFnLikeExpander,
@@ -340,13 +346,19 @@ fn parse_macro_with_arg(
         None => return ExpandResult { value: None, err: result.err },
     };
 
-    log::debug!("expanded = {}", tt.as_debug_string());
-
     let fragment_kind = to_fragment_kind(db, macro_call_id);
 
+    log::debug!("expanded = {}", tt.as_debug_string());
+    log::debug!("kind = {:?}", fragment_kind);
+
     let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
         Ok(it) => it,
         Err(err) => {
+            log::debug!(
+                "failed to parse expanstion to {:?} = {}",
+                fragment_kind,
+                tt.as_debug_string()
+            );
             return ExpandResult::only_err(err);
         }
     };
@@ -362,15 +374,34 @@ fn parse_macro_with_arg(
                     return ExpandResult::only_err(err);
                 }
             };
-
-            if !diff(&node, &call_node.value).is_empty() {
-                ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
-            } else {
+            if is_self_replicating(&node, &call_node.value) {
                 return ExpandResult::only_err(err);
+            } else {
+                ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
+            }
+        }
+        None => {
+            log::debug!("parse = {:?}", parse.syntax_node().kind());
+            ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
+        }
+    }
+}
+
+fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
+    if diff(from, to).is_empty() {
+        return true;
+    }
+    if let Some(stmts) = MacroStmts::cast(from.clone()) {
+        if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
+            return true;
+        }
+        if let Some(expr) = stmts.expr() {
+            if diff(expr.syntax(), to).is_empty() {
+                return true;
             }
         }
-        None => ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None },
     }
+    false
 }
 
 fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
@@ -390,21 +421,15 @@ fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
 
     let parent = match syn.parent() {
         Some(it) => it,
-        None => {
-            // FIXME:
-            // If it is root, which means the parent HirFile
-            // MacroKindFile must be non-items
-            // return expr now.
-            return FragmentKind::Expr;
-        }
+        None => return FragmentKind::Statements,
     };
 
     match parent.kind() {
         MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
-        MACRO_STMTS => FragmentKind::Statement,
+        MACRO_STMTS => FragmentKind::Statements,
         ITEM_LIST => FragmentKind::Items,
         LET_STMT => {
-            // FIXME: Handle Pattern
+            // FIXME: Handle LHS Pattern
             FragmentKind::Expr
         }
         EXPR_STMT => FragmentKind::Statements,
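The `is_self_replicating` helper generalizes the old whole-node `diff` check: an errored expansion is rejected not only when it is identical to the original call, but also when any statement or the tail expression of its `MacroStmts` wrapper reproduces the call. The recursive forwarding macro from the new test further down in this diff is the motivating shape; a condensed sketch of that fixture (illustrative, adapted from the test, not library code):

    // Each intermediate step of `ng!` expands to another `ng!` invocation,
    // i.e. a statement that reproduces a macro call; only the final step
    // produces the real `let` statement.
    macro_rules! ng {
        ([$($tts:tt)*]) => { $($tts)*; };
        ([$($tts:tt)*] $head:tt $($rest:tt)*) => {
            ng! { [$($tts)* $head] $($rest)* }
        };
    }

    fn foo() {
        ng!([] let a = 3);
        let b = a; // inference still reaches `a: i32` through the expansions
    }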
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index 3f3187ea2..e6ede05ca 100644
--- a/crates/hir_ty/src/infer/expr.rs
+++ b/crates/hir_ty/src/infer/expr.rs
@@ -767,6 +767,7 @@ impl<'a> InferenceContext<'a> {
                     None => self.table.new_float_var(),
                 },
             },
+            Expr::MacroStmts { tail } => self.infer_expr(*tail, expected),
         };
         // use a new type variable if we got unknown here
         let ty = self.insert_type_vars_shallow(ty);
diff --git a/crates/hir_ty/src/tests/macros.rs b/crates/hir_ty/src/tests/macros.rs
index 7eda51866..01935ec99 100644
--- a/crates/hir_ty/src/tests/macros.rs
+++ b/crates/hir_ty/src/tests/macros.rs
@@ -226,12 +226,49 @@ fn expr_macro_expanded_in_stmts() {
         "#,
         expect![[r#"
             !0..8 'leta=();': ()
+            !0..8 'leta=();': ()
+            !3..4 'a': ()
+            !5..7 '()': ()
             57..84 '{ ...); } }': ()
         "#]],
     );
 }
 
 #[test]
+fn recurisve_macro_expanded_in_stmts() {
+    check_infer(
+        r#"
+        macro_rules! ng {
+            ([$($tts:tt)*]) => {
+                $($tts)*;
+            };
+            ([$($tts:tt)*] $head:tt $($rest:tt)*) => {
+                ng! {
+                    [$($tts)* $head] $($rest)*
+                }
+            };
+        }
+        fn foo() {
+            ng!([] let a = 3);
+            let b = a;
+        }
+        "#,
+        expect![[r#"
+            !0..7 'leta=3;': {unknown}
+            !0..7 'leta=3;': {unknown}
+            !0..13 'ng!{[leta=3]}': {unknown}
+            !0..13 'ng!{[leta=]3}': {unknown}
+            !0..13 'ng!{[leta]=3}': {unknown}
+            !3..4 'a': i32
+            !5..6 '3': i32
+            196..237 '{ ...= a; }': ()
+            229..230 'b': i32
+            233..234 'a': i32
+        "#]],
+    );
+}
+
+#[test]
 fn recursive_inner_item_macro_rules() {
     check_infer(
         r#"
@@ -246,7 +283,8 @@ fn recursive_inner_item_macro_rules() {
         "#,
         expect![[r#"
             !0..1 '1': i32
-            !0..7 'mac!($)': {unknown}
+            !0..26 'macro_...>{1};}': {unknown}
+            !0..26 'macro_...>{1};}': {unknown}
             107..143 '{ ...!(); }': ()
             129..130 'a': i32
         "#]],
diff --git a/crates/ide_assists/src/handlers/generate_impl.rs b/crates/ide_assists/src/handlers/generate_impl.rs
index a8e3c4fc2..fd2e250bc 100644
--- a/crates/ide_assists/src/handlers/generate_impl.rs
+++ b/crates/ide_assists/src/handlers/generate_impl.rs
@@ -72,6 +72,17 @@ mod tests {
         check_assist(
             generate_impl,
             r#"
+                struct MyOwnArray<T, const S: usize> {}$0"#,
+            r#"
+                struct MyOwnArray<T, const S: usize> {}
+
+                impl<T, const S: usize> MyOwnArray<T, S> {
+                    $0
+                }"#,
+        );
+        check_assist(
+            generate_impl,
+            r#"
                 #[cfg(feature = "foo")]
                 struct Foo<'a, T: Foo<'a>> {$0}"#,
             r#"
@@ -114,11 +125,11 @@ mod tests {
         check_assist(
             generate_impl,
             r#"
-                struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String> {}$0"#,
+                struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#,
             r#"
-                struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String> {}
+                struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}
 
-                impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b> Defaulted<'a, 'b, T> {
+                impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> {
                     $0
                 }"#,
         );
diff --git a/crates/ide_assists/src/utils.rs b/crates/ide_assists/src/utils.rs
index 9de9e4dbd..5f630ec75 100644
--- a/crates/ide_assists/src/utils.rs
+++ b/crates/ide_assists/src/utils.rs
@@ -434,7 +434,8 @@ fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str
             }
             buf
         });
-        let generics = lifetimes.chain(type_params).format(", ");
+        let const_params = generic_params.const_params().map(|t| t.syntax().to_string());
+        let generics = lifetimes.chain(type_params).chain(const_params).format(", ");
         format_to!(buf, "<{}>", generics);
     }
     buf.push(' ');
@@ -452,7 +453,11 @@ fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str
             .type_params()
             .filter_map(|it| it.name())
             .map(|it| SmolStr::from(it.text()));
-        format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
+        let const_params = generic_params
+            .const_params()
+            .filter_map(|it| it.name())
+            .map(|it| SmolStr::from(it.text()));
+        format_to!(buf, "<{}>", lifetime_params.chain(type_params).chain(const_params).format(", "))
     }
 
     match adt.where_clause() {
diff --git a/crates/ide_db/src/call_info/tests.rs b/crates/ide_db/src/call_info/tests.rs
index 75ab3eb6e..281a081a3 100644
--- a/crates/ide_db/src/call_info/tests.rs
+++ b/crates/ide_db/src/call_info/tests.rs
@@ -1,8 +1,9 @@
-use crate::RootDatabase;
 use base_db::{fixture::ChangeFixture, FilePosition};
 use expect_test::{expect, Expect};
 use test_utils::RangeOrOffset;
 
+use crate::RootDatabase;
+
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
 pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let change_fixture = ChangeFixture::parse(ra_fixture);
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs
index e681ced80..be3a22725 100644
--- a/crates/ide_db/src/helpers/insert_use.rs
+++ b/crates/ide_db/src/helpers/insert_use.rs
@@ -1,7 +1,6 @@
 //! Handle syntactic aspects of inserting a new `use`.
 use std::{cmp::Ordering, iter::successors};
 
-use crate::RootDatabase;
 use hir::Semantics;
 use itertools::{EitherOrBoth, Itertools};
 use syntax::{
@@ -14,6 +13,8 @@ use syntax::{
     AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
 };
 
+use crate::RootDatabase;
+
 pub use hir::PrefixKind;
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
diff --git a/crates/ide_db/src/label.rs b/crates/ide_db/src/label.rs
index c0e89e72f..1f1e715c9 100644
--- a/crates/ide_db/src/label.rs
+++ b/crates/ide_db/src/label.rs
@@ -1,4 +1,4 @@
-//! See `Label`
+//! See [`Label`]
 use std::fmt;
 
 /// A type to specify UI label, like an entry in the list of assists. Enforces
diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs
index 3634b2b26..b55e3851e 100644
--- a/crates/ide_db/src/search.rs
+++ b/crates/ide_db/src/search.rs
@@ -12,9 +12,8 @@ use once_cell::unsync::Lazy;
 use rustc_hash::FxHashMap;
 use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
 
-use crate::defs::NameClass;
 use crate::{
-    defs::{Definition, NameRefClass},
+    defs::{Definition, NameClass, NameRefClass},
     RootDatabase,
 };
 
diff --git a/crates/ide_db/src/source_change.rs b/crates/ide_db/src/source_change.rs
index b36455d49..846530f78 100644
--- a/crates/ide_db/src/source_change.rs
+++ b/crates/ide_db/src/source_change.rs
@@ -37,6 +37,8 @@ impl SourceChange {
         }
     }
 
+    /// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
+    /// edits for a file if some already exist.
     pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
         match self.source_file_edits.entry(file_id) {
             Entry::Occupied(mut entry) => {
diff --git a/crates/ide_db/src/traits.rs b/crates/ide_db/src/traits.rs
index 78a43f587..66ae81c73 100644
--- a/crates/ide_db/src/traits.rs
+++ b/crates/ide_db/src/traits.rs
@@ -61,7 +61,7 @@ pub fn get_missing_assoc_items(
     resolve_target_trait(sema, impl_def).map_or(vec![], |target_trait| {
         target_trait
             .items(sema.db)
-            .iter()
+            .into_iter()
             .filter(|i| match i {
                 hir::AssocItem::Function(f) => {
                     !impl_fns_consts.contains(&f.name(sema.db).to_string())
@@ -72,7 +72,6 @@ pub fn get_missing_assoc_items(
                     .map(|n| !impl_fns_consts.contains(&n.to_string()))
                     .unwrap_or_default(),
             })
-            .cloned()
             .collect()
     })
 }
diff --git a/crates/ide_db/src/traits/tests.rs b/crates/ide_db/src/traits/tests.rs
index 84bb25505..2a5482024 100644
--- a/crates/ide_db/src/traits/tests.rs
+++ b/crates/ide_db/src/traits/tests.rs
@@ -1,10 +1,11 @@
-use crate::RootDatabase;
 use base_db::{fixture::ChangeFixture, FilePosition};
 use expect_test::{expect, Expect};
 use hir::Semantics;
 use syntax::ast::{self, AstNode};
 use test_utils::RangeOrOffset;
 
+use crate::RootDatabase;
+
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
 pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let change_fixture = ChangeFixture::parse(ra_fixture);
diff --git a/crates/ide_db/src/ty_filter.rs b/crates/ide_db/src/ty_filter.rs
index f8406851b..988ecd060 100644
--- a/crates/ide_db/src/ty_filter.rs
+++ b/crates/ide_db/src/ty_filter.rs
@@ -2,11 +2,13 @@
 //! Use case for structures in this module is, for example, situation when you need to process
 //! only certain `Enum`s.
 
-use crate::RootDatabase;
-use hir::{Adt, Semantics, Type};
 use std::iter;
+
+use hir::{Adt, Semantics, Type};
 use syntax::ast::{self, make};
 
+use crate::RootDatabase;
+
 /// Enum types that implement `std::ops::Try` trait.
 #[derive(Clone, Copy)]
 pub enum TryEnum {
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 6d7db5fb2..2772d7364 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -8,23 +8,23 @@ use parser::SyntaxKind;
 
 use crate::{
     ast::{self, support, AstNode, AstToken, AttrsOwner, NameOwner, SyntaxNode},
-    SmolStr, SyntaxElement, SyntaxToken, T,
+    SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> SmolStr {
+    pub fn text(&self) -> TokenText {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> SmolStr {
+    pub fn text(&self) -> TokenText {
         text_of_first_token(self.syntax())
    }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> SmolStr {
+    pub fn text(&self) -> TokenText {
         text_of_first_token(self.syntax())
     }
 
@@ -33,8 +33,11 @@ impl ast::NameRef {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> SmolStr {
-    node.green().children().next().and_then(|it| it.into_token()).unwrap().text().into()
+fn text_of_first_token(node: &SyntaxNode) -> TokenText {
+    let first_token =
+        node.green().children().next().and_then(|it| it.into_token()).unwrap().to_owned();
+
+    TokenText(first_token)
 }
 
 pub enum Macro {
@@ -376,7 +379,7 @@ impl fmt::Display for NameOrNameRef {
 }
 
 impl NameOrNameRef {
-    pub fn text(&self) -> SmolStr {
+    pub fn text(&self) -> TokenText {
         match self {
             NameOrNameRef::Name(name) => name.text(),
             NameOrNameRef::NameRef(name_ref) => name_ref.text(),
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index 2a5c61171..90de6bef6 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -29,6 +29,7 @@ mod syntax_error;
 mod parsing;
 mod validation;
 mod ptr;
+mod token_text;
 #[cfg(test)]
 mod tests;
 
@@ -55,6 +56,7 @@ pub use crate::{
         SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
         SyntaxTreeBuilder,
     },
+    token_text::TokenText,
 };
 pub use parser::{SyntaxKind, T};
 pub use rowan::{
diff --git a/crates/syntax/src/token_text.rs b/crates/syntax/src/token_text.rs
new file mode 100644
index 000000000..d2ed0a12a
--- /dev/null
+++ b/crates/syntax/src/token_text.rs
@@ -0,0 +1,77 @@
+//! Yet another version of owned string, backed by a syntax tree token.
+
+use std::{cmp::Ordering, fmt, ops};
+
+pub struct TokenText(pub(crate) rowan::GreenToken);
+
+impl TokenText {
+    pub fn as_str(&self) -> &str {
+        self.0.text()
+    }
+}
+
+impl ops::Deref for TokenText {
+    type Target = str;
+
+    fn deref(&self) -> &str {
+        self.as_str()
+    }
+}
+impl AsRef<str> for TokenText {
+    fn as_ref(&self) -> &str {
+        self.as_str()
+    }
+}
+
+impl From<TokenText> for String {
+    fn from(token_text: TokenText) -> Self {
+        token_text.as_str().into()
+    }
+}
+
+impl PartialEq<&'_ str> for TokenText {
+    fn eq(&self, other: &&str) -> bool {
+        self.as_str() == *other
+    }
+}
+impl PartialEq<TokenText> for &'_ str {
+    fn eq(&self, other: &TokenText) -> bool {
+        other == self
+    }
+}
+impl PartialEq<String> for TokenText {
+    fn eq(&self, other: &String) -> bool {
+        self.as_str() == other.as_str()
+    }
+}
+impl PartialEq<TokenText> for String {
+    fn eq(&self, other: &TokenText) -> bool {
+        other == self
+    }
+}
+impl PartialEq for TokenText {
+    fn eq(&self, other: &TokenText) -> bool {
+        self.as_str() == other.as_str()
+    }
+}
+impl Eq for TokenText {}
+impl Ord for TokenText {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.as_str().cmp(other.as_str())
+    }
+}
+impl PartialOrd for TokenText {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+impl fmt::Display for TokenText {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(self.as_str(), f)
+    }
+}
+impl fmt::Debug for TokenText {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(self.as_str(), f)
+    }
+}
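Since `TokenText` derefs to `str` and carries the comparison impls above, call sites that previously compared the `SmolStr` returned by `text()` against string literals keep compiling unchanged. A small usage sketch (the helper function is hypothetical, written only to show the API surface):

    use syntax::{ast, TokenText};

    // The comparisons rely on the `PartialEq<&str> for TokenText` impl from
    // token_text.rs; `ast::NameRef::text()` returns `TokenText` after the
    // node_ext.rs change above.
    fn is_self_name(name_ref: &ast::NameRef) -> bool {
        let text: TokenText = name_ref.text();
        text == "self" || text == "Self"
    }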
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 36a86e78f..b1beeb883 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -178,6 +178,15 @@ $ cargo xtask install --server
 If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-analyzer/rust-analyzer/issues/1811[this issue].
 On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
 
+==== `rustup`
+
+`rust-analyzer` is available in `rustup`, but only in the nightly toolchain:
+
+[source,bash]
+----
+$ rustup +nightly component add rust-analyzer-preview
+----
+
 ==== Arch Linux
 
 The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):