aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--README.md2
-rw-r--r--crates/base_db/src/lib.rs3
-rw-r--r--crates/hir_ty/src/diagnostics/match_check.rs40
-rw-r--r--crates/hir_ty/src/infer/expr.rs2
-rw-r--r--crates/ide/src/diagnostics.rs1
-rw-r--r--crates/ide/src/doc_links.rs63
-rw-r--r--crates/ide/src/folding_ranges.rs39
-rw-r--r--crates/ide/src/goto_definition.rs15
-rw-r--r--crates/ide/src/hover.rs1
-rw-r--r--crates/ide/src/move_item.rs268
-rw-r--r--crates/ide/src/syntax_highlighting/inject.rs32
-rw-r--r--crates/ide_db/src/defs.rs41
-rw-r--r--crates/profile/src/google_cpu_profiler.rs19
-rw-r--r--crates/profile/src/hprof.rs107
-rw-r--r--crates/profile/src/lib.rs39
-rw-r--r--crates/rust-analyzer/src/benchmarks.rs71
-rw-r--r--crates/rust-analyzer/src/bin/flags.rs53
-rw-r--r--crates/rust-analyzer/src/bin/main.rs13
-rw-r--r--crates/rust-analyzer/src/cli.rs4
-rw-r--r--crates/rust-analyzer/src/cli/analysis_bench.rs196
-rw-r--r--crates/rust-analyzer/src/lib.rs3
-rw-r--r--crates/rust-analyzer/src/to_proto.rs6
-rw-r--r--docs/dev/README.md72
-rw-r--r--docs/user/manual.adoc19
24 files changed, 646 insertions, 463 deletions
diff --git a/README.md b/README.md
index 74f944ae4..085c8c0f6 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
4 alt="rust-analyzer logo"> 4 alt="rust-analyzer logo">
5</p> 5</p>
6 6
7rust-analyzer is an **experimental** modular compiler frontend for the Rust language. 7rust-analyzer is a modular compiler frontend for the Rust language.
8It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust. 8It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust.
9 9
10Work on rust-analyzer is sponsored by 10Work on rust-analyzer is sponsored by
diff --git a/crates/base_db/src/lib.rs b/crates/base_db/src/lib.rs
index 5f77a0b1f..980a0ed98 100644
--- a/crates/base_db/src/lib.rs
+++ b/crates/base_db/src/lib.rs
@@ -59,6 +59,8 @@ pub trait CheckCanceled {
59 Self: Sized + panic::RefUnwindSafe, 59 Self: Sized + panic::RefUnwindSafe,
60 F: FnOnce(&Self) -> T + panic::UnwindSafe, 60 F: FnOnce(&Self) -> T + panic::UnwindSafe,
61 { 61 {
62 // Uncomment to debug missing cancellations.
63 // let _span = profile::heartbeat_span();
62 panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() { 64 panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() {
63 Ok(canceled) => *canceled, 65 Ok(canceled) => *canceled,
64 Err(payload) => panic::resume_unwind(payload), 66 Err(payload) => panic::resume_unwind(payload),
@@ -68,6 +70,7 @@ pub trait CheckCanceled {
68 70
69impl<T: salsa::Database> CheckCanceled for T { 71impl<T: salsa::Database> CheckCanceled for T {
70 fn check_canceled(&self) { 72 fn check_canceled(&self) {
73 // profile::heartbeat();
71 if self.salsa_runtime().is_current_revision_canceled() { 74 if self.salsa_runtime().is_current_revision_canceled() {
72 Canceled::throw() 75 Canceled::throw()
73 } 76 }
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index 5a5cdcbf3..9cb472b51 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -539,7 +539,7 @@ impl Matrix {
539 if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) { 539 if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) {
540 // Or patterns are expanded here 540 // Or patterns are expanded here
541 for pat_id in pat_ids { 541 for pat_id in pat_ids {
542 self.0.push(PatStack::from_pattern(pat_id)); 542 self.0.push(row.replace_head_with([pat_id].iter()));
543 } 543 }
544 } else { 544 } else {
545 self.0.push(row); 545 self.0.push(row);
@@ -1085,6 +1085,20 @@ fn main() {
1085 } 1085 }
1086 1086
1087 #[test] 1087 #[test]
1088 fn or_pattern_no_diagnostic() {
1089 check_diagnostics(
1090 r#"
1091enum Either {A, B}
1092
1093fn main() {
1094 match (Either::A, Either::B) {
1095 (Either::A | Either::B, _) => (),
1096 }
1097}"#,
1098 )
1099 }
1100
1101 #[test]
1088 fn mismatched_types() { 1102 fn mismatched_types() {
1089 // Match statements with arms that don't match the 1103 // Match statements with arms that don't match the
1090 // expression pattern do not fire this diagnostic. 1104 // expression pattern do not fire this diagnostic.
@@ -1336,30 +1350,6 @@ fn bang(never: !) {
1336 } 1350 }
1337 1351
1338 #[test] 1352 #[test]
1339 fn or_pattern_panic() {
1340 check_diagnostics(
1341 r#"
1342pub enum Category { Infinity, Zero }
1343
1344fn panic(a: Category, b: Category) {
1345 match (a, b) {
1346 (Category::Zero | Category::Infinity, _) => (),
1347 (_, Category::Zero | Category::Infinity) => (),
1348 }
1349
1350 // FIXME: This is a false positive, but the code used to cause a panic in the match checker,
1351 // so this acts as a regression test for that.
1352 match (a, b) {
1353 //^^^^^^ Missing match arm
1354 (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (),
1355 (Category::Infinity | Category::Zero, _) => (),
1356 }
1357}
1358"#,
1359 );
1360 }
1361
1362 #[test]
1363 fn unknown_type() { 1353 fn unknown_type() {
1364 check_diagnostics( 1354 check_diagnostics(
1365 r#" 1355 r#"
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index e6ede05ca..6279aa572 100644
--- a/crates/hir_ty/src/infer/expr.rs
+++ b/crates/hir_ty/src/infer/expr.rs
@@ -119,6 +119,8 @@ impl<'a> InferenceContext<'a> {
119 } 119 }
120 120
121 fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { 121 fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
122 self.db.check_canceled();
123
122 let body = Arc::clone(&self.body); // avoid borrow checker problem 124 let body = Arc::clone(&self.body); // avoid borrow checker problem
123 let ty = match &body[tgt_expr] { 125 let ty = match &body[tgt_expr] {
124 Expr::Missing => self.err_ty(), 126 Expr::Missing => self.err_ty(),
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 22697a537..dd42116a7 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -165,7 +165,6 @@ pub(crate) fn diagnostics(
165 sema.diagnostics_display_range(d.display_source()).range, 165 sema.diagnostics_display_range(d.display_source()).range,
166 d.message(), 166 d.message(),
167 ) 167 )
168 .with_unused(true)
169 .with_fix(d.fix(&sema)) 168 .with_fix(d.fix(&sema))
170 .with_code(Some(d.code())), 169 .with_code(Some(d.code())),
171 ); 170 );
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 4a9e8b21c..67e2e5a1c 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -98,6 +98,29 @@ pub(crate) fn remove_links(markdown: &str) -> String {
98 out 98 out
99} 99}
100 100
101/// Retrieve a link to documentation for the given symbol.
102pub(crate) fn external_docs(
103 db: &RootDatabase,
104 position: &FilePosition,
105) -> Option<DocumentationLink> {
106 let sema = Semantics::new(db);
107 let file = sema.parse(position.file_id).syntax().clone();
108 let token = pick_best(file.token_at_offset(position.offset))?;
109 let token = sema.descend_into_macros(token);
110
111 let node = token.parent()?;
112 let definition = match_ast! {
113 match node {
114 ast::NameRef(name_ref) => NameRefClass::classify(&sema, &name_ref).map(|d| d.referenced(sema.db)),
115 ast::Name(name) => NameClass::classify(&sema, &name).map(|d| d.referenced_or_defined(sema.db)),
116 _ => None,
117 }
118 };
119
120 get_doc_link(db, definition?)
121}
122
123/// Extracts all links from a given markdown text.
101pub(crate) fn extract_definitions_from_markdown( 124pub(crate) fn extract_definitions_from_markdown(
102 markdown: &str, 125 markdown: &str,
103) -> Vec<(Range<usize>, String, Option<hir::Namespace>)> { 126) -> Vec<(Range<usize>, String, Option<hir::Namespace>)> {
@@ -178,15 +201,15 @@ pub(crate) fn resolve_doc_path_for_def(
178) -> Option<hir::ModuleDef> { 201) -> Option<hir::ModuleDef> {
179 match def { 202 match def {
180 Definition::ModuleDef(def) => match def { 203 Definition::ModuleDef(def) => match def {
181 ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns), 204 hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
182 ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns), 205 hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
183 ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns), 206 hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
184 ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns), 207 hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
185 ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns), 208 hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
186 ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns), 209 hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
187 ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns), 210 hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
188 ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns), 211 hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
189 ModuleDef::BuiltinType(_) => None, 212 hir::ModuleDef::BuiltinType(_) => None,
190 }, 213 },
191 Definition::Macro(it) => it.resolve_doc_path(db, &link, ns), 214 Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
192 Definition::Field(it) => it.resolve_doc_path(db, &link, ns), 215 Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
@@ -328,28 +351,6 @@ fn rewrite_url_link(db: &RootDatabase, def: ModuleDef, target: &str) -> Option<S
328 .map(|url| url.into_string()) 351 .map(|url| url.into_string())
329} 352}
330 353
331/// Retrieve a link to documentation for the given symbol.
332pub(crate) fn external_docs(
333 db: &RootDatabase,
334 position: &FilePosition,
335) -> Option<DocumentationLink> {
336 let sema = Semantics::new(db);
337 let file = sema.parse(position.file_id).syntax().clone();
338 let token = pick_best(file.token_at_offset(position.offset))?;
339 let token = sema.descend_into_macros(token);
340
341 let node = token.parent()?;
342 let definition = match_ast! {
343 match node {
344 ast::NameRef(name_ref) => NameRefClass::classify(&sema, &name_ref).map(|d| d.referenced(sema.db)),
345 ast::Name(name) => NameClass::classify(&sema, &name).map(|d| d.referenced_or_defined(sema.db)),
346 _ => None,
347 }
348 };
349
350 get_doc_link(db, definition?)
351}
352
353/// Rewrites a markdown document, applying 'callback' to each link. 354/// Rewrites a markdown document, applying 'callback' to each link.
354fn map_links<'e>( 355fn map_links<'e>(
355 events: impl Iterator<Item = Event<'e>>, 356 events: impl Iterator<Item = Event<'e>>,
diff --git a/crates/ide/src/folding_ranges.rs b/crates/ide/src/folding_ranges.rs
index 4b1b24562..153726ce8 100644
--- a/crates/ide/src/folding_ranges.rs
+++ b/crates/ide/src/folding_ranges.rs
@@ -17,6 +17,8 @@ pub enum FoldKind {
17 Block, 17 Block,
18 ArgList, 18 ArgList,
19 Region, 19 Region,
20 Consts,
21 Statics,
20} 22}
21 23
22#[derive(Debug)] 24#[derive(Debug)]
@@ -30,6 +32,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
30 let mut visited_comments = FxHashSet::default(); 32 let mut visited_comments = FxHashSet::default();
31 let mut visited_imports = FxHashSet::default(); 33 let mut visited_imports = FxHashSet::default();
32 let mut visited_mods = FxHashSet::default(); 34 let mut visited_mods = FxHashSet::default();
35 let mut visited_consts = FxHashSet::default();
36 let mut visited_statics = FxHashSet::default();
33 // regions can be nested, here is a LIFO buffer 37 // regions can be nested, here is a LIFO buffer
34 let mut regions_starts: Vec<TextSize> = vec![]; 38 let mut regions_starts: Vec<TextSize> = vec![];
35 39
@@ -91,6 +95,19 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
91 res.push(Fold { range, kind: FoldKind::Mods }) 95 res.push(Fold { range, kind: FoldKind::Mods })
92 } 96 }
93 } 97 }
98
99 // Fold groups of consts
100 if node.kind() == CONST && !visited_consts.contains(&node) {
101 if let Some(range) = contiguous_range_for_group(&node, &mut visited_consts) {
102 res.push(Fold { range, kind: FoldKind::Consts })
103 }
104 }
105 // Fold groups of consts
106 if node.kind() == STATIC && !visited_statics.contains(&node) {
107 if let Some(range) = contiguous_range_for_group(&node, &mut visited_statics) {
108 res.push(Fold { range, kind: FoldKind::Statics })
109 }
110 }
94 } 111 }
95 } 112 }
96 } 113 }
@@ -250,6 +267,8 @@ mod tests {
250 FoldKind::Block => "block", 267 FoldKind::Block => "block",
251 FoldKind::ArgList => "arglist", 268 FoldKind::ArgList => "arglist",
252 FoldKind::Region => "region", 269 FoldKind::Region => "region",
270 FoldKind::Consts => "consts",
271 FoldKind::Statics => "statics",
253 }; 272 };
254 assert_eq!(kind, &attr.unwrap()); 273 assert_eq!(kind, &attr.unwrap());
255 } 274 }
@@ -457,4 +476,24 @@ calling_function(x,y);
457"#, 476"#,
458 ) 477 )
459 } 478 }
479
480 #[test]
481 fn fold_consecutive_const() {
482 check(
483 r#"
484<fold consts>const FIRST_CONST: &str = "first";
485const SECOND_CONST: &str = "second";</fold>
486 "#,
487 )
488 }
489
490 #[test]
491 fn fold_consecutive_static() {
492 check(
493 r#"
494<fold statics>static FIRST_STATIC: &str = "first";
495static SECOND_STATIC: &str = "second";</fold>
496 "#,
497 )
498 }
460} 499}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index a2c97061f..c6556c487 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -918,6 +918,21 @@ fn f() -> impl Iterator<Item$0 = u8> {}
918 } 918 }
919 919
920 #[test] 920 #[test]
921 #[should_panic = "unresolved reference"]
922 fn unknown_assoc_ty() {
923 check(
924 r#"
925trait Iterator {
926 type Item;
927 //^^^^
928}
929
930fn f() -> impl Iterator<Invalid$0 = u8> {}
931 "#,
932 )
933 }
934
935 #[test]
921 fn goto_def_for_assoc_ty_in_path_multiple() { 936 fn goto_def_for_assoc_ty_in_path_multiple() {
922 check( 937 check(
923 r#" 938 r#"
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index dfd32c8c1..7e35a1450 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -470,6 +470,7 @@ fn find_std_module(famous_defs: &FamousDefs, name: &str) -> Option<hir::Module>
470 470
471fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> { 471fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
472 return tokens.max_by_key(priority); 472 return tokens.max_by_key(priority);
473
473 fn priority(n: &SyntaxToken) -> usize { 474 fn priority(n: &SyntaxToken) -> usize {
474 match n.kind() { 475 match n.kind() {
475 IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => 3, 476 IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => 3,
diff --git a/crates/ide/src/move_item.rs b/crates/ide/src/move_item.rs
index 05fa8fc13..d36dcd4e4 100644
--- a/crates/ide/src/move_item.rs
+++ b/crates/ide/src/move_item.rs
@@ -4,10 +4,12 @@ use hir::Semantics;
4use ide_db::{base_db::FileRange, RootDatabase}; 4use ide_db::{base_db::FileRange, RootDatabase};
5use itertools::Itertools; 5use itertools::Itertools;
6use syntax::{ 6use syntax::{
7 algo, ast, match_ast, AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, 7 algo, ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
8 TokenAtOffset,
8}; 9};
9use text_edit::{TextEdit, TextEditBuilder}; 10use text_edit::{TextEdit, TextEditBuilder};
10 11
12#[derive(Copy, Clone, Debug)]
11pub enum Direction { 13pub enum Direction {
12 Up, 14 Up,
13 Down, 15 Down,
@@ -31,14 +33,19 @@ pub(crate) fn move_item(
31 let sema = Semantics::new(db); 33 let sema = Semantics::new(db);
32 let file = sema.parse(range.file_id); 34 let file = sema.parse(range.file_id);
33 35
34 let item = file.syntax().covering_element(range.range); 36 let item = if range.range.is_empty() {
37 SyntaxElement::Token(pick_best(file.syntax().token_at_offset(range.range.start()))?)
38 } else {
39 file.syntax().covering_element(range.range)
40 };
41
35 find_ancestors(item, direction, range.range) 42 find_ancestors(item, direction, range.range)
36} 43}
37 44
38fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -> Option<TextEdit> { 45fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -> Option<TextEdit> {
39 let root = match item { 46 let root = match item {
40 NodeOrToken::Node(node) => node, 47 SyntaxElement::Node(node) => node,
41 NodeOrToken::Token(token) => token.parent()?, 48 SyntaxElement::Token(token) => token.parent()?,
42 }; 49 };
43 50
44 let movable = [ 51 let movable = [
@@ -51,6 +58,11 @@ fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -
51 SyntaxKind::PARAM, 58 SyntaxKind::PARAM,
52 SyntaxKind::LET_STMT, 59 SyntaxKind::LET_STMT,
53 SyntaxKind::EXPR_STMT, 60 SyntaxKind::EXPR_STMT,
61 SyntaxKind::IF_EXPR,
62 SyntaxKind::FOR_EXPR,
63 SyntaxKind::LOOP_EXPR,
64 SyntaxKind::WHILE_EXPR,
65 SyntaxKind::RETURN_EXPR,
54 SyntaxKind::MATCH_EXPR, 66 SyntaxKind::MATCH_EXPR,
55 SyntaxKind::MACRO_CALL, 67 SyntaxKind::MACRO_CALL,
56 SyntaxKind::TYPE_ALIAS, 68 SyntaxKind::TYPE_ALIAS,
@@ -83,11 +95,11 @@ fn move_in_direction(
83) -> Option<TextEdit> { 95) -> Option<TextEdit> {
84 match_ast! { 96 match_ast! {
85 match node { 97 match node {
86 ast::ArgList(it) => swap_sibling_in_list(it.args(), range, direction), 98 ast::ArgList(it) => swap_sibling_in_list(node, it.args(), range, direction),
87 ast::GenericParamList(it) => swap_sibling_in_list(it.generic_params(), range, direction), 99 ast::GenericParamList(it) => swap_sibling_in_list(node, it.generic_params(), range, direction),
88 ast::GenericArgList(it) => swap_sibling_in_list(it.generic_args(), range, direction), 100 ast::GenericArgList(it) => swap_sibling_in_list(node, it.generic_args(), range, direction),
89 ast::VariantList(it) => swap_sibling_in_list(it.variants(), range, direction), 101 ast::VariantList(it) => swap_sibling_in_list(node, it.variants(), range, direction),
90 ast::TypeBoundList(it) => swap_sibling_in_list(it.bounds(), range, direction), 102 ast::TypeBoundList(it) => swap_sibling_in_list(node, it.bounds(), range, direction),
91 _ => Some(replace_nodes(node, &match direction { 103 _ => Some(replace_nodes(node, &match direction {
92 Direction::Up => node.prev_sibling(), 104 Direction::Up => node.prev_sibling(),
93 Direction::Down => node.next_sibling(), 105 Direction::Down => node.next_sibling(),
@@ -97,19 +109,27 @@ fn move_in_direction(
97} 109}
98 110
99fn swap_sibling_in_list<A: AstNode + Clone, I: Iterator<Item = A>>( 111fn swap_sibling_in_list<A: AstNode + Clone, I: Iterator<Item = A>>(
112 node: &SyntaxNode,
100 list: I, 113 list: I,
101 range: TextRange, 114 range: TextRange,
102 direction: Direction, 115 direction: Direction,
103) -> Option<TextEdit> { 116) -> Option<TextEdit> {
104 let (l, r) = list 117 let list_lookup = list
105 .tuple_windows() 118 .tuple_windows()
106 .filter(|(l, r)| match direction { 119 .filter(|(l, r)| match direction {
107 Direction::Up => r.syntax().text_range().contains_range(range), 120 Direction::Up => r.syntax().text_range().contains_range(range),
108 Direction::Down => l.syntax().text_range().contains_range(range), 121 Direction::Down => l.syntax().text_range().contains_range(range),
109 }) 122 })
110 .next()?; 123 .next();
111 124
112 Some(replace_nodes(l.syntax(), r.syntax())) 125 if let Some((l, r)) = list_lookup {
126 Some(replace_nodes(l.syntax(), r.syntax()))
127 } else {
128 // Cursor is beyond any movable list item (for example, on curly brace in enum).
129 // It's not necessary, that parent of list is movable (arg list's parent is not, for example),
130 // and we have to continue tree traversal to find suitable node.
131 find_ancestors(SyntaxElement::Node(node.parent()?), direction, range)
132 }
113} 133}
114 134
115fn replace_nodes(first: &SyntaxNode, second: &SyntaxNode) -> TextEdit { 135fn replace_nodes(first: &SyntaxNode, second: &SyntaxNode) -> TextEdit {
@@ -121,6 +141,18 @@ fn replace_nodes(first: &SyntaxNode, second: &SyntaxNode) -> TextEdit {
121 edit.finish() 141 edit.finish()
122} 142}
123 143
144fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
145 return tokens.max_by_key(priority);
146
147 fn priority(n: &SyntaxToken) -> usize {
148 match n.kind() {
149 SyntaxKind::IDENT | SyntaxKind::LIFETIME_IDENT => 2,
150 kind if kind.is_trivia() => 0,
151 _ => 1,
152 }
153 }
154}
155
124#[cfg(test)] 156#[cfg(test)]
125mod tests { 157mod tests {
126 use crate::fixture; 158 use crate::fixture;
@@ -265,6 +297,107 @@ fn main() {
265 "#]], 297 "#]],
266 Direction::Up, 298 Direction::Up,
267 ); 299 );
300 check(
301 r#"
302fn main() {
303 println!("Hello, world");
304
305 if true {
306 println!("Test");
307 }$0$0
308}
309 "#,
310 expect![[r#"
311fn main() {
312 if true {
313 println!("Test");
314 }
315
316 println!("Hello, world");
317}
318 "#]],
319 Direction::Up,
320 );
321 check(
322 r#"
323fn main() {
324 println!("Hello, world");
325
326 for i in 0..10 {
327 println!("Test");
328 }$0$0
329}
330 "#,
331 expect![[r#"
332fn main() {
333 for i in 0..10 {
334 println!("Test");
335 }
336
337 println!("Hello, world");
338}
339 "#]],
340 Direction::Up,
341 );
342 check(
343 r#"
344fn main() {
345 println!("Hello, world");
346
347 loop {
348 println!("Test");
349 }$0$0
350}
351 "#,
352 expect![[r#"
353fn main() {
354 loop {
355 println!("Test");
356 }
357
358 println!("Hello, world");
359}
360 "#]],
361 Direction::Up,
362 );
363 check(
364 r#"
365fn main() {
366 println!("Hello, world");
367
368 while true {
369 println!("Test");
370 }$0$0
371}
372 "#,
373 expect![[r#"
374fn main() {
375 while true {
376 println!("Test");
377 }
378
379 println!("Hello, world");
380}
381 "#]],
382 Direction::Up,
383 );
384 check(
385 r#"
386fn main() {
387 println!("Hello, world");
388
389 return 123;$0$0
390}
391 "#,
392 expect![[r#"
393fn main() {
394 return 123;
395
396 println!("Hello, world");
397}
398 "#]],
399 Direction::Up,
400 );
268 } 401 }
269 402
270 #[test] 403 #[test]
@@ -615,6 +748,115 @@ fn test() {
615 } 748 }
616 749
617 #[test] 750 #[test]
751 fn test_cursor_at_item_start() {
752 check(
753 r#"
754$0$0#[derive(Debug)]
755enum FooBar {
756 Foo,
757 Bar,
758}
759
760fn main() {}
761 "#,
762 expect![[r#"
763fn main() {}
764
765#[derive(Debug)]
766enum FooBar {
767 Foo,
768 Bar,
769}
770 "#]],
771 Direction::Down,
772 );
773 check(
774 r#"
775$0$0enum FooBar {
776 Foo,
777 Bar,
778}
779
780fn main() {}
781 "#,
782 expect![[r#"
783fn main() {}
784
785enum FooBar {
786 Foo,
787 Bar,
788}
789 "#]],
790 Direction::Down,
791 );
792 check(
793 r#"
794struct Test;
795
796trait SomeTrait {}
797
798$0$0impl SomeTrait for Test {}
799
800fn main() {}
801 "#,
802 expect![[r#"
803struct Test;
804
805impl SomeTrait for Test {}
806
807trait SomeTrait {}
808
809fn main() {}
810 "#]],
811 Direction::Up,
812 );
813 }
814
815 #[test]
816 fn test_cursor_at_item_end() {
817 check(
818 r#"
819enum FooBar {
820 Foo,
821 Bar,
822}$0$0
823
824fn main() {}
825 "#,
826 expect![[r#"
827fn main() {}
828
829enum FooBar {
830 Foo,
831 Bar,
832}
833 "#]],
834 Direction::Down,
835 );
836 check(
837 r#"
838struct Test;
839
840trait SomeTrait {}
841
842impl SomeTrait for Test {}$0$0
843
844fn main() {}
845 "#,
846 expect![[r#"
847struct Test;
848
849impl SomeTrait for Test {}
850
851trait SomeTrait {}
852
853fn main() {}
854 "#]],
855 Direction::Up,
856 );
857 }
858
859 #[test]
618 fn handles_empty_file() { 860 fn handles_empty_file() {
619 check(r#"$0$0"#, expect![[r#""#]], Direction::Up); 861 check(r#"$0$0"#, expect![[r#""#]], Direction::Up);
620 } 862 }
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 963c3fb59..b62d43256 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -11,7 +11,8 @@ use syntax::{
11}; 11};
12 12
13use crate::{ 13use crate::{
14 doc_links::extract_definitions_from_markdown, Analysis, HlMod, HlRange, HlTag, RootDatabase, 14 doc_links::{extract_definitions_from_markdown, resolve_doc_path_for_def},
15 Analysis, HlMod, HlRange, HlTag, RootDatabase,
15}; 16};
16 17
17use super::{highlights::Highlights, injector::Injector}; 18use super::{highlights::Highlights, injector::Injector};
@@ -190,7 +191,7 @@ pub(super) fn doc_comment(
190 extract_definitions_from_markdown(line) 191 extract_definitions_from_markdown(line)
191 .into_iter() 192 .into_iter()
192 .filter_map(|(range, link, ns)| { 193 .filter_map(|(range, link, ns)| {
193 Some(range).zip(validate_intra_doc_link(sema.db, &def, &link, ns)) 194 Some(range).zip(resolve_doc_path_for_def(sema.db, def, &link, ns))
194 }) 195 })
195 .map(|(Range { start, end }, def)| { 196 .map(|(Range { start, end }, def)| {
196 ( 197 (
@@ -283,33 +284,6 @@ fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::Stri
283 } 284 }
284} 285}
285 286
286fn validate_intra_doc_link(
287 db: &RootDatabase,
288 def: &Definition,
289 link: &str,
290 ns: Option<hir::Namespace>,
291) -> Option<hir::ModuleDef> {
292 match def {
293 Definition::ModuleDef(def) => match def {
294 hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
295 hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
296 hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
297 hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
298 hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
299 hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
300 hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
301 hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
302 hir::ModuleDef::BuiltinType(_) => None,
303 },
304 Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
305 Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
306 Definition::SelfType(_)
307 | Definition::Local(_)
308 | Definition::GenericParam(_)
309 | Definition::Label(_) => None,
310 }
311}
312
313fn module_def_to_hl_tag(def: hir::ModuleDef) -> HlTag { 287fn module_def_to_hl_tag(def: hir::ModuleDef) -> HlTag {
314 let symbol = match def { 288 let symbol = match def {
315 hir::ModuleDef::Module(_) => SymbolKind::Module, 289 hir::ModuleDef::Module(_) => SymbolKind::Module,
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs
index 0d9808d24..de0dc2a40 100644
--- a/crates/ide_db/src/defs.rs
+++ b/crates/ide_db/src/defs.rs
@@ -330,25 +330,30 @@ impl NameRefClass {
330 } 330 }
331 } 331 }
332 332
333 if ast::AssocTypeArg::cast(parent.clone()).is_some() { 333 if let Some(assoc_type_arg) = ast::AssocTypeArg::cast(parent.clone()) {
334 // `Trait<Assoc = Ty>` 334 if assoc_type_arg.name_ref().as_ref() == Some(name_ref) {
335 // ^^^^^ 335 // `Trait<Assoc = Ty>`
336 let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; 336 // ^^^^^
337 let resolved = sema.resolve_path(&path)?; 337 let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
338 if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved { 338 let resolved = sema.resolve_path(&path)?;
339 if let Some(ty) = tr 339 if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved {
340 .items(sema.db) 340 // FIXME: resolve in supertraits
341 .iter() 341 if let Some(ty) = tr
342 .filter_map(|assoc| match assoc { 342 .items(sema.db)
343 hir::AssocItem::TypeAlias(it) => Some(*it), 343 .iter()
344 _ => None, 344 .filter_map(|assoc| match assoc {
345 }) 345 hir::AssocItem::TypeAlias(it) => Some(*it),
346 .find(|alias| &alias.name(sema.db).to_string() == &name_ref.text()) 346 _ => None,
347 { 347 })
348 return Some(NameRefClass::Definition(Definition::ModuleDef( 348 .find(|alias| &alias.name(sema.db).to_string() == &name_ref.text())
349 ModuleDef::TypeAlias(ty), 349 {
350 ))); 350 return Some(NameRefClass::Definition(Definition::ModuleDef(
351 ModuleDef::TypeAlias(ty),
352 )));
353 }
351 } 354 }
355
356 return None;
352 } 357 }
353 } 358 }
354 359
diff --git a/crates/profile/src/google_cpu_profiler.rs b/crates/profile/src/google_cpu_profiler.rs
index db865c65b..cae6caeaa 100644
--- a/crates/profile/src/google_cpu_profiler.rs
+++ b/crates/profile/src/google_cpu_profiler.rs
@@ -14,26 +14,31 @@ extern "C" {
14 fn ProfilerStop(); 14 fn ProfilerStop();
15} 15}
16 16
17static PROFILER_STATE: AtomicUsize = AtomicUsize::new(OFF);
18const OFF: usize = 0; 17const OFF: usize = 0;
19const ON: usize = 1; 18const ON: usize = 1;
20const PENDING: usize = 2; 19const PENDING: usize = 2;
21 20
22pub fn start(path: &Path) { 21fn transition(current: usize, new: usize) -> bool {
23 if PROFILER_STATE.compare_and_swap(OFF, PENDING, Ordering::SeqCst) != OFF { 22 static STATE: AtomicUsize = AtomicUsize::new(OFF);
23
24 STATE.compare_exchange(current, new, Ordering::SeqCst, Ordering::SeqCst).is_ok()
25}
26
27pub(crate) fn start(path: &Path) {
28 if !transition(OFF, PENDING) {
24 panic!("profiler already started"); 29 panic!("profiler already started");
25 } 30 }
26 let path = CString::new(path.display().to_string()).unwrap(); 31 let path = CString::new(path.display().to_string()).unwrap();
27 if unsafe { ProfilerStart(path.as_ptr()) } == 0 { 32 if unsafe { ProfilerStart(path.as_ptr()) } == 0 {
28 panic!("profiler failed to start") 33 panic!("profiler failed to start")
29 } 34 }
30 assert!(PROFILER_STATE.compare_and_swap(PENDING, ON, Ordering::SeqCst) == PENDING); 35 assert!(transition(PENDING, ON));
31} 36}
32 37
33pub fn stop() { 38pub(crate) fn stop() {
34 if PROFILER_STATE.compare_and_swap(ON, PENDING, Ordering::SeqCst) != ON { 39 if !transition(ON, PENDING) {
35 panic!("profiler is not started") 40 panic!("profiler is not started")
36 } 41 }
37 unsafe { ProfilerStop() }; 42 unsafe { ProfilerStop() };
38 assert!(PROFILER_STATE.compare_and_swap(PENDING, OFF, Ordering::SeqCst) == PENDING); 43 assert!(transition(PENDING, OFF));
39} 44}
diff --git a/crates/profile/src/hprof.rs b/crates/profile/src/hprof.rs
index 29d2ed518..5fdb37206 100644
--- a/crates/profile/src/hprof.rs
+++ b/crates/profile/src/hprof.rs
@@ -1,5 +1,4 @@
1//! Simple hierarchical profiler 1//! Simple hierarchical profiler
2use once_cell::sync::Lazy;
3use std::{ 2use std::{
4 cell::RefCell, 3 cell::RefCell,
5 collections::{BTreeMap, HashSet}, 4 collections::{BTreeMap, HashSet},
@@ -12,6 +11,8 @@ use std::{
12 time::{Duration, Instant}, 11 time::{Duration, Instant},
13}; 12};
14 13
14use once_cell::sync::Lazy;
15
15use crate::tree::{Idx, Tree}; 16use crate::tree::{Idx, Tree};
16 17
17/// Filtering syntax 18/// Filtering syntax
@@ -56,18 +57,32 @@ type Label = &'static str;
56/// 0ms - profile 57/// 0ms - profile
57/// 0ms - profile2 58/// 0ms - profile2
58/// ``` 59/// ```
60#[inline]
59pub fn span(label: Label) -> ProfileSpan { 61pub fn span(label: Label) -> ProfileSpan {
60 assert!(!label.is_empty()); 62 debug_assert!(!label.is_empty());
61 63
62 if PROFILING_ENABLED.load(Ordering::Relaxed) 64 let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
63 && PROFILE_STACK.with(|stack| stack.borrow_mut().push(label)) 65 if enabled && with_profile_stack(|stack| stack.push(label)) {
64 {
65 ProfileSpan(Some(ProfilerImpl { label, detail: None })) 66 ProfileSpan(Some(ProfilerImpl { label, detail: None }))
66 } else { 67 } else {
67 ProfileSpan(None) 68 ProfileSpan(None)
68 } 69 }
69} 70}
70 71
72#[inline]
73pub fn heartbeat_span() -> HeartbeatSpan {
74 let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
75 HeartbeatSpan::new(enabled)
76}
77
78#[inline]
79pub fn heartbeat() {
80 let enabled = PROFILING_ENABLED.load(Ordering::Relaxed);
81 if enabled {
82 with_profile_stack(|it| it.heartbeat(1));
83 }
84}
85
71pub struct ProfileSpan(Option<ProfilerImpl>); 86pub struct ProfileSpan(Option<ProfilerImpl>);
72 87
73struct ProfilerImpl { 88struct ProfilerImpl {
@@ -85,20 +100,48 @@ impl ProfileSpan {
85} 100}
86 101
87impl Drop for ProfilerImpl { 102impl Drop for ProfilerImpl {
103 #[inline]
104 fn drop(&mut self) {
105 with_profile_stack(|it| it.pop(self.label, self.detail.take()));
106 }
107}
108
109pub struct HeartbeatSpan {
110 enabled: bool,
111}
112
113impl HeartbeatSpan {
114 #[inline]
115 pub fn new(enabled: bool) -> Self {
116 if enabled {
117 with_profile_stack(|it| it.heartbeats(true))
118 }
119 Self { enabled }
120 }
121}
122
123impl Drop for HeartbeatSpan {
88 fn drop(&mut self) { 124 fn drop(&mut self) {
89 PROFILE_STACK.with(|it| it.borrow_mut().pop(self.label, self.detail.take())); 125 if self.enabled {
126 with_profile_stack(|it| it.heartbeats(false))
127 }
90 } 128 }
91} 129}
92 130
93static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false); 131static PROFILING_ENABLED: AtomicBool = AtomicBool::new(false);
94static FILTER: Lazy<RwLock<Filter>> = Lazy::new(Default::default); 132static FILTER: Lazy<RwLock<Filter>> = Lazy::new(Default::default);
95thread_local!(static PROFILE_STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new())); 133
134fn with_profile_stack<T>(f: impl FnOnce(&mut ProfileStack) -> T) -> T {
135 thread_local!(static STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new()));
136 STACK.with(|it| f(&mut *it.borrow_mut()))
137}
96 138
97#[derive(Default, Clone, Debug)] 139#[derive(Default, Clone, Debug)]
98struct Filter { 140struct Filter {
99 depth: usize, 141 depth: usize,
100 allowed: HashSet<String>, 142 allowed: HashSet<String>,
101 longer_than: Duration, 143 longer_than: Duration,
144 heartbeat_longer_than: Duration,
102 version: usize, 145 version: usize,
103} 146}
104 147
@@ -115,6 +158,7 @@ impl Filter {
115 } else { 158 } else {
116 Duration::new(0, 0) 159 Duration::new(0, 0)
117 }; 160 };
161 let heartbeat_longer_than = longer_than;
118 162
119 let depth = if let Some(idx) = spec.rfind('@') { 163 let depth = if let Some(idx) = spec.rfind('@') {
120 let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth"); 164 let depth: usize = spec[idx + 1..].parse().expect("invalid profile depth");
@@ -125,7 +169,7 @@ impl Filter {
125 }; 169 };
126 let allowed = 170 let allowed =
127 if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() }; 171 if spec == "*" { HashSet::new() } else { spec.split('|').map(String::from).collect() };
128 Filter { depth, allowed, longer_than, version: 0 } 172 Filter { depth, allowed, longer_than, heartbeat_longer_than, version: 0 }
129 } 173 }
130 174
131 fn install(mut self) { 175 fn install(mut self) {
@@ -137,9 +181,15 @@ impl Filter {
137} 181}
138 182
139struct ProfileStack { 183struct ProfileStack {
140 starts: Vec<Instant>, 184 frames: Vec<Frame>,
141 filter: Filter, 185 filter: Filter,
142 messages: Tree<Message>, 186 messages: Tree<Message>,
187 heartbeats: bool,
188}
189
190struct Frame {
191 t: Instant,
192 heartbeats: u32,
143} 193}
144 194
145#[derive(Default)] 195#[derive(Default)]
@@ -151,35 +201,49 @@ struct Message {
151 201
152impl ProfileStack { 202impl ProfileStack {
153 fn new() -> ProfileStack { 203 fn new() -> ProfileStack {
154 ProfileStack { starts: Vec::new(), messages: Tree::default(), filter: Default::default() } 204 ProfileStack {
205 frames: Vec::new(),
206 messages: Tree::default(),
207 filter: Default::default(),
208 heartbeats: false,
209 }
155 } 210 }
156 211
157 fn push(&mut self, label: Label) -> bool { 212 fn push(&mut self, label: Label) -> bool {
158 if self.starts.is_empty() { 213 if self.frames.is_empty() {
159 if let Ok(f) = FILTER.try_read() { 214 if let Ok(f) = FILTER.try_read() {
160 if f.version > self.filter.version { 215 if f.version > self.filter.version {
161 self.filter = f.clone(); 216 self.filter = f.clone();
162 } 217 }
163 }; 218 };
164 } 219 }
165 if self.starts.len() > self.filter.depth { 220 if self.frames.len() > self.filter.depth {
166 return false; 221 return false;
167 } 222 }
168 let allowed = &self.filter.allowed; 223 let allowed = &self.filter.allowed;
169 if self.starts.is_empty() && !allowed.is_empty() && !allowed.contains(label) { 224 if self.frames.is_empty() && !allowed.is_empty() && !allowed.contains(label) {
170 return false; 225 return false;
171 } 226 }
172 227
173 self.starts.push(Instant::now()); 228 self.frames.push(Frame { t: Instant::now(), heartbeats: 0 });
174 self.messages.start(); 229 self.messages.start();
175 true 230 true
176 } 231 }
177 232
178 fn pop(&mut self, label: Label, detail: Option<String>) { 233 fn pop(&mut self, label: Label, detail: Option<String>) {
179 let start = self.starts.pop().unwrap(); 234 let frame = self.frames.pop().unwrap();
180 let duration = start.elapsed(); 235 let duration = frame.t.elapsed();
236
237 if self.heartbeats {
238 self.heartbeat(frame.heartbeats);
239 let avg_span = duration / (frame.heartbeats + 1);
240 if avg_span > self.filter.heartbeat_longer_than {
241 eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration)
242 }
243 }
244
181 self.messages.finish(Message { duration, label, detail }); 245 self.messages.finish(Message { duration, label, detail });
182 if self.starts.is_empty() { 246 if self.frames.is_empty() {
183 let longer_than = self.filter.longer_than; 247 let longer_than = self.filter.longer_than;
184 // Convert to millis for comparison to avoid problems with rounding 248 // Convert to millis for comparison to avoid problems with rounding
185 // (otherwise we could print `0ms` despite user's `>0` filter when 249 // (otherwise we could print `0ms` despite user's `>0` filter when
@@ -192,6 +256,15 @@ impl ProfileStack {
192 self.messages.clear(); 256 self.messages.clear();
193 } 257 }
194 } 258 }
259
260 fn heartbeats(&mut self, yes: bool) {
261 self.heartbeats = yes;
262 }
263 fn heartbeat(&mut self, n: u32) {
264 if let Some(frame) = self.frames.last_mut() {
265 frame.heartbeats += n;
266 }
267 }
195} 268}
196 269
197fn print( 270fn print(
diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs
index 79dba47d5..a31fb8f43 100644
--- a/crates/profile/src/lib.rs
+++ b/crates/profile/src/lib.rs
@@ -10,7 +10,7 @@ mod tree;
10use std::cell::RefCell; 10use std::cell::RefCell;
11 11
12pub use crate::{ 12pub use crate::{
13 hprof::{init, init_from, span}, 13 hprof::{heartbeat, heartbeat_span, init, init_from, span},
14 memory_usage::{Bytes, MemoryUsage}, 14 memory_usage::{Bytes, MemoryUsage},
15 stop_watch::{StopWatch, StopWatchSpan}, 15 stop_watch::{StopWatch, StopWatchSpan},
16}; 16};
@@ -52,7 +52,7 @@ impl Drop for Scope {
52/// Usage: 52/// Usage:
 53/// 1. Install gperftools (https://github.com/gperftools/gperftools), probably packaged with your Linux distro. 53/// 1. Install gperftools (https://github.com/gperftools/gperftools), probably packaged with your Linux distro.
54/// 2. Build with `cpu_profiler` feature. 54/// 2. Build with `cpu_profiler` feature.
55/// 3. Tun the code, the *raw* output would be in the `./out.profile` file. 55/// 3. Run the code, the *raw* output would be in the `./out.profile` file.
56/// 4. Install pprof for visualization (https://github.com/google/pprof). 56/// 4. Install pprof for visualization (https://github.com/google/pprof).
57/// 5. Bump sampling frequency to once per ms: `export CPUPROFILE_FREQUENCY=1000` 57/// 5. Bump sampling frequency to once per ms: `export CPUPROFILE_FREQUENCY=1000`
58/// 6. Use something like `pprof -svg target/release/rust-analyzer ./out.profile` to see the results. 58/// 6. Use something like `pprof -svg target/release/rust-analyzer ./out.profile` to see the results.
@@ -60,8 +60,17 @@ impl Drop for Scope {
60/// For example, here's how I run profiling on NixOS: 60/// For example, here's how I run profiling on NixOS:
61/// 61///
62/// ```bash 62/// ```bash
63/// $ nix-shell -p gperftools --run \ 63/// $ bat -p shell.nix
64/// 'cargo run --release -p rust-analyzer -- parse < ~/projects/rustbench/parser.rs > /dev/null' 64/// with import <nixpkgs> {};
65/// mkShell {
66/// buildInputs = [ gperftools ];
67/// shellHook = ''
68/// export LD_LIBRARY_PATH="${gperftools}/lib:"
69/// '';
70/// }
71/// $ set -x CPUPROFILE_FREQUENCY 1000
72/// $ nix-shell --run 'cargo test --release --package rust-analyzer --lib -- benchmarks::benchmark_integrated_highlighting --exact --nocapture'
73/// $ pprof -svg target/release/deps/rust_analyzer-8739592dc93d63cb crates/rust-analyzer/out.profile > profile.svg
65/// ``` 74/// ```
66/// 75///
67/// See this diff for how to profile completions: 76/// See this diff for how to profile completions:
@@ -81,7 +90,9 @@ pub fn cpu_span() -> CpuSpan {
81 90
82 #[cfg(not(feature = "cpu_profiler"))] 91 #[cfg(not(feature = "cpu_profiler"))]
83 { 92 {
84 eprintln!("cpu_profiler feature is disabled") 93 eprintln!(
94 r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
95 )
85 } 96 }
86 97
87 CpuSpan { _private: () } 98 CpuSpan { _private: () }
@@ -91,7 +102,23 @@ impl Drop for CpuSpan {
91 fn drop(&mut self) { 102 fn drop(&mut self) {
92 #[cfg(feature = "cpu_profiler")] 103 #[cfg(feature = "cpu_profiler")]
93 { 104 {
94 google_cpu_profiler::stop() 105 google_cpu_profiler::stop();
106 let profile_data = std::env::current_dir().unwrap().join("out.profile");
107 eprintln!("Profile data saved to:\n\n {}\n", profile_data.display());
108 let mut cmd = std::process::Command::new("pprof");
109 cmd.arg("-svg").arg(std::env::current_exe().unwrap()).arg(&profile_data);
110 let out = cmd.output();
111
112 match out {
113 Ok(out) if out.status.success() => {
114 let svg = profile_data.with_extension("svg");
115 std::fs::write(&svg, &out.stdout).unwrap();
116 eprintln!("Profile rendered to:\n\n {}\n", svg.display());
117 }
118 _ => {
119 eprintln!("Failed to run:\n\n {:?}\n", cmd);
120 }
121 }
95 } 122 }
96 } 123 }
97} 124}
diff --git a/crates/rust-analyzer/src/benchmarks.rs b/crates/rust-analyzer/src/benchmarks.rs
new file mode 100644
index 000000000..bf569b40b
--- /dev/null
+++ b/crates/rust-analyzer/src/benchmarks.rs
@@ -0,0 +1,71 @@
1//! Fully integrated benchmarks for rust-analyzer, which load real cargo
2//! projects.
3//!
4//! The benchmark here is used to debug specific performance regressions. If you
5//! notice that, eg, completion is slow in some specific case, you can modify
 6//! code here to exercise this specific completion, and thus have a fast
7//! edit/compile/test cycle.
8//!
9//! Note that "Rust Analyzer: Run" action does not allow running a single test
10//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
11//! which you can use to paste the command in terminal and add `--release` manually.
12
13use std::sync::Arc;
14
15use ide::Change;
16use test_utils::project_root;
17use vfs::{AbsPathBuf, VfsPath};
18
19use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
20
21#[test]
22fn benchmark_integrated_highlighting() {
23 // Don't run slow benchmark by default
24 if true {
25 return;
26 }
27
28 // Load rust-analyzer itself.
29 let workspace_to_load = project_root();
30 let file = "./crates/ide_db/src/apply_change.rs";
31
32 let cargo_config = Default::default();
33 let load_cargo_config =
34 LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: false };
35
36 let (mut host, vfs, _proc_macro) = {
37 let _it = stdx::timeit("workspace loading");
38 load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
39 };
40
41 let file_id = {
42 let file = workspace_to_load.join(file);
43 let path = VfsPath::from(AbsPathBuf::assert(file));
44 vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
45 };
46
47 {
48 let _it = stdx::timeit("initial");
49 let analysis = host.analysis();
50 analysis.highlight_as_html(file_id, false).unwrap();
51 }
52
53 profile::init_from("*>100");
54 // let _s = profile::heartbeat_span();
55
56 {
57 let _it = stdx::timeit("change");
58 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
59 text.push_str("\npub fn _dummy() {}\n");
60 let mut change = Change::new();
61 change.change_file(file_id, Some(Arc::new(text)));
62 host.apply_change(change);
63 }
64
65 {
66 let _it = stdx::timeit("after change");
67 let _span = profile::cpu_span();
68 let analysis = host.analysis();
69 analysis.highlight_as_html(file_id, false).unwrap();
70 }
71}
diff --git a/crates/rust-analyzer/src/bin/flags.rs b/crates/rust-analyzer/src/bin/flags.rs
index d8987633d..b05fc00b9 100644
--- a/crates/rust-analyzer/src/bin/flags.rs
+++ b/crates/rust-analyzer/src/bin/flags.rs
@@ -1,10 +1,9 @@
1//! Grammar for the command-line arguments. 1//! Grammar for the command-line arguments.
2#![allow(unreachable_pub)] 2#![allow(unreachable_pub)]
3use std::{env, path::PathBuf}; 3use std::path::PathBuf;
4 4
5use ide_ssr::{SsrPattern, SsrRule}; 5use ide_ssr::{SsrPattern, SsrRule};
6use rust_analyzer::cli::{BenchWhat, Position, Verbosity}; 6use rust_analyzer::cli::Verbosity;
7use vfs::AbsPathBuf;
8 7
9xflags::xflags! { 8xflags::xflags! {
10 src "./src/bin/flags.rs" 9 src "./src/bin/flags.rs"
@@ -74,27 +73,6 @@ xflags::xflags! {
74 optional --with-proc-macro 73 optional --with-proc-macro
75 } 74 }
76 75
77 /// Benchmark specific analysis operation
78 cmd analysis-bench
79 /// Directory with Cargo.toml.
80 required path: PathBuf
81 {
82 /// Collect memory usage statistics.
83 optional --memory-usage
84
85 /// Compute syntax highlighting for this file
86 optional --highlight path: PathBuf
87 /// Compute completions at file:line:column location.
88 optional --complete location: Position
89 /// Compute goto definition at file:line:column location.
90 optional --goto-def location: Position
91
92 /// Load OUT_DIR values by running `cargo check` before analysis.
93 optional --load-output-dirs
94 /// Use proc-macro-srv for proc-macro expanding.
95 optional --with-proc-macro
96 }
97
98 cmd diagnostics 76 cmd diagnostics
99 /// Directory with Cargo.toml. 77 /// Directory with Cargo.toml.
100 required path: PathBuf 78 required path: PathBuf
@@ -142,7 +120,6 @@ pub enum RustAnalyzerCmd {
142 Symbols(Symbols), 120 Symbols(Symbols),
143 Highlight(Highlight), 121 Highlight(Highlight),
144 AnalysisStats(AnalysisStats), 122 AnalysisStats(AnalysisStats),
145 AnalysisBench(AnalysisBench),
146 Diagnostics(Diagnostics), 123 Diagnostics(Diagnostics),
147 Ssr(Ssr), 124 Ssr(Ssr),
148 Search(Search), 125 Search(Search),
@@ -184,18 +161,6 @@ pub struct AnalysisStats {
184} 161}
185 162
186#[derive(Debug)] 163#[derive(Debug)]
187pub struct AnalysisBench {
188 pub path: PathBuf,
189
190 pub memory_usage: bool,
191 pub highlight: Option<PathBuf>,
192 pub complete: Option<Position>,
193 pub goto_def: Option<Position>,
194 pub load_output_dirs: bool,
195 pub with_proc_macro: bool,
196}
197
198#[derive(Debug)]
199pub struct Diagnostics { 164pub struct Diagnostics {
200 pub path: PathBuf, 165 pub path: PathBuf,
201 166
@@ -239,17 +204,3 @@ impl RustAnalyzer {
239 } 204 }
240 } 205 }
241} 206}
242
243impl AnalysisBench {
244 pub(crate) fn what(&self) -> BenchWhat {
245 match (&self.highlight, &self.complete, &self.goto_def) {
246 (Some(path), None, None) => {
247 let path = env::current_dir().unwrap().join(path);
248 BenchWhat::Highlight { path: AbsPathBuf::assert(path) }
249 }
250 (None, Some(position), None) => BenchWhat::Complete(position.clone()),
251 (None, None, Some(position)) => BenchWhat::GotoDef(position.clone()),
252 _ => panic!("exactly one of `--highlight`, `--complete` or `--goto-def` must be set"),
253 }
254 }
255}
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index a0b611bff..ae99eefe3 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -9,7 +9,7 @@ use std::{convert::TryFrom, env, fs, path::Path, process};
9use lsp_server::Connection; 9use lsp_server::Connection;
10use project_model::ProjectManifest; 10use project_model::ProjectManifest;
11use rust_analyzer::{ 11use rust_analyzer::{
12 cli::{self, AnalysisStatsCmd, BenchCmd}, 12 cli::{self, AnalysisStatsCmd},
13 config::Config, 13 config::Config,
14 from_json, 14 from_json,
15 lsp_ext::supports_utf8, 15 lsp_ext::supports_utf8,
@@ -80,17 +80,6 @@ fn try_main() -> Result<()> {
80 with_proc_macro: cmd.with_proc_macro, 80 with_proc_macro: cmd.with_proc_macro,
81 } 81 }
82 .run(verbosity)?, 82 .run(verbosity)?,
83 flags::RustAnalyzerCmd::AnalysisBench(cmd) => {
84 let what = cmd.what();
85 BenchCmd {
86 memory_usage: cmd.memory_usage,
87 path: cmd.path,
88 load_output_dirs: cmd.load_output_dirs,
89 with_proc_macro: cmd.with_proc_macro,
90 what,
91 }
92 .run(verbosity)?
93 }
94 83
95 flags::RustAnalyzerCmd::Diagnostics(cmd) => { 84 flags::RustAnalyzerCmd::Diagnostics(cmd) => {
96 cli::diagnostics(&cmd.path, cmd.load_output_dirs, cmd.with_proc_macro)? 85 cli::diagnostics(&cmd.path, cmd.load_output_dirs, cmd.with_proc_macro)?
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs
index ed732eb38..76b666dc2 100644
--- a/crates/rust-analyzer/src/cli.rs
+++ b/crates/rust-analyzer/src/cli.rs
@@ -1,8 +1,7 @@
1//! Various batch processing tasks, intended primarily for debugging. 1//! Various batch processing tasks, intended primarily for debugging.
2 2
3mod load_cargo; 3pub(crate) mod load_cargo;
4mod analysis_stats; 4mod analysis_stats;
5mod analysis_bench;
6mod diagnostics; 5mod diagnostics;
7mod progress_report; 6mod progress_report;
8mod ssr; 7mod ssr;
@@ -15,7 +14,6 @@ use syntax::{AstNode, SourceFile};
15use vfs::Vfs; 14use vfs::Vfs;
16 15
17pub use self::{ 16pub use self::{
18 analysis_bench::{BenchCmd, BenchWhat, Position},
19 analysis_stats::AnalysisStatsCmd, 17 analysis_stats::AnalysisStatsCmd,
20 diagnostics::diagnostics, 18 diagnostics::diagnostics,
21 load_cargo::{load_workspace, load_workspace_at, LoadCargoConfig}, 19 load_cargo::{load_workspace, load_workspace_at, LoadCargoConfig},
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs
deleted file mode 100644
index 49994824f..000000000
--- a/crates/rust-analyzer/src/cli/analysis_bench.rs
+++ /dev/null
@@ -1,196 +0,0 @@
1//! Benchmark operations like highlighting or goto definition.
2
3use std::{env, path::PathBuf, str::FromStr, sync::Arc, time::Instant};
4
5use anyhow::{bail, format_err, Result};
6use hir::PrefixKind;
7use ide::{
8 Analysis, AnalysisHost, Change, CompletionConfig, DiagnosticsConfig, FilePosition, LineCol,
9};
10use ide_db::{
11 base_db::{
12 salsa::{Database, Durability},
13 FileId,
14 },
15 helpers::{insert_use::InsertUseConfig, SnippetCap},
16};
17use vfs::AbsPathBuf;
18
19use crate::cli::{
20 load_cargo::{load_workspace_at, LoadCargoConfig},
21 print_memory_usage, Verbosity,
22};
23
24pub struct BenchCmd {
25 pub path: PathBuf,
26 pub what: BenchWhat,
27 pub memory_usage: bool,
28 pub load_output_dirs: bool,
29 pub with_proc_macro: bool,
30}
31
32pub enum BenchWhat {
33 Highlight { path: AbsPathBuf },
34 Complete(Position),
35 GotoDef(Position),
36}
37
38#[derive(Debug, Clone)]
39pub struct Position {
40 pub path: AbsPathBuf,
41 pub line: u32,
42 pub column: u32,
43}
44
45impl FromStr for Position {
46 type Err = anyhow::Error;
47 fn from_str(s: &str) -> Result<Self> {
48 let mut split = s.rsplitn(3, ':');
49 match (split.next(), split.next(), split.next()) {
50 (Some(column), Some(line), Some(path)) => {
51 let path = env::current_dir().unwrap().join(path);
52 let path = AbsPathBuf::assert(path);
53 Ok(Position { path, line: line.parse()?, column: column.parse()? })
54 }
55 _ => bail!("position should be in file:line:column format: {:?}", s),
56 }
57 }
58}
59
60impl BenchCmd {
61 pub fn run(self, verbosity: Verbosity) -> Result<()> {
62 profile::init();
63
64 let start = Instant::now();
65 eprint!("loading: ");
66
67 let cargo_config = Default::default();
68 let load_cargo_config = LoadCargoConfig {
69 load_out_dirs_from_check: self.load_output_dirs,
70 with_proc_macro: self.with_proc_macro,
71 };
72 let (mut host, vfs, _proc_macro) =
73 load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
74 eprintln!("{:?}\n", start.elapsed());
75
76 let file_id = {
77 let path = match &self.what {
78 BenchWhat::Highlight { path } => path,
79 BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => &pos.path,
80 };
81 let path = path.clone().into();
82 vfs.file_id(&path).ok_or_else(|| format_err!("Can't find {}", path))?
83 };
84
85 match &self.what {
86 BenchWhat::Highlight { .. } => {
87 let res = do_work(&mut host, file_id, |analysis| {
88 analysis.diagnostics(&DiagnosticsConfig::default(), file_id).unwrap();
89 analysis.highlight_as_html(file_id, false).unwrap()
90 });
91 if verbosity.is_verbose() {
92 println!("\n{}", res);
93 }
94 }
95 BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => {
96 let is_completion = matches!(self.what, BenchWhat::Complete(..));
97
98 let offset = host
99 .analysis()
100 .file_line_index(file_id)?
101 .offset(LineCol { line: pos.line - 1, col: pos.column });
102 let file_position = FilePosition { file_id, offset };
103
104 if is_completion {
105 let options = CompletionConfig {
106 enable_postfix_completions: true,
107 enable_imports_on_the_fly: true,
108 add_call_parenthesis: true,
109 add_call_argument_snippets: true,
110 snippet_cap: SnippetCap::new(true),
111 insert_use: InsertUseConfig {
112 merge: None,
113 prefix_kind: PrefixKind::Plain,
114 group: true,
115 },
116 };
117 let res = do_work(&mut host, file_id, |analysis| {
118 analysis.completions(&options, file_position)
119 });
120 if verbosity.is_verbose() {
121 println!("\n{:#?}", res);
122 }
123 } else {
124 let res = do_work(&mut host, file_id, |analysis| {
125 analysis.goto_definition(file_position)
126 });
127 if verbosity.is_verbose() {
128 println!("\n{:#?}", res);
129 }
130 }
131 }
132 }
133
134 if self.memory_usage {
135 print_memory_usage(host, vfs);
136 }
137
138 Ok(())
139 }
140}
141
142fn do_work<F: Fn(&Analysis) -> T, T>(host: &mut AnalysisHost, file_id: FileId, work: F) -> T {
143 {
144 let start = Instant::now();
145 eprint!("from scratch: ");
146 work(&host.analysis());
147 eprintln!("{:?}", start.elapsed());
148 }
149 {
150 let start = Instant::now();
151 eprint!("no change: ");
152 work(&host.analysis());
153 eprintln!("{:?}", start.elapsed());
154 }
155 {
156 let start = Instant::now();
157 eprint!("trivial change: ");
158 host.raw_database_mut().salsa_runtime_mut().synthetic_write(Durability::LOW);
159 work(&host.analysis());
160 eprintln!("{:?}", start.elapsed());
161 }
162 {
163 let start = Instant::now();
164 eprint!("comment change: ");
165 {
166 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
167 text.push_str("\n/* Hello world */\n");
168 let mut change = Change::new();
169 change.change_file(file_id, Some(Arc::new(text)));
170 host.apply_change(change);
171 }
172 work(&host.analysis());
173 eprintln!("{:?}", start.elapsed());
174 }
175 {
176 let start = Instant::now();
177 eprint!("item change: ");
178 {
179 let mut text = host.analysis().file_text(file_id).unwrap().to_string();
180 text.push_str("\npub fn _dummy() {}\n");
181 let mut change = Change::new();
182 change.change_file(file_id, Some(Arc::new(text)));
183 host.apply_change(change);
184 }
185 work(&host.analysis());
186 eprintln!("{:?}", start.elapsed());
187 }
188 {
189 let start = Instant::now();
190 eprint!("const change: ");
191 host.raw_database_mut().salsa_runtime_mut().synthetic_write(Durability::HIGH);
192 let res = work(&host.analysis());
193 eprintln!("{:?}", start.elapsed());
194 res
195 }
196}
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index 8b874239c..d9a5030a0 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -39,6 +39,9 @@ mod op_queue;
39pub mod lsp_ext; 39pub mod lsp_ext;
40pub mod config; 40pub mod config;
41 41
42#[cfg(test)]
43mod benchmarks;
44
42use serde::de::DeserializeOwned; 45use serde::de::DeserializeOwned;
43use std::fmt; 46use std::fmt;
44 47
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 530c8a5a4..e297a72e6 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -492,7 +492,11 @@ pub(crate) fn folding_range(
492 FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), 492 FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
493 FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), 493 FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
494 FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region), 494 FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
495 FoldKind::Mods | FoldKind::Block | FoldKind::ArgList => None, 495 FoldKind::Mods
496 | FoldKind::Block
497 | FoldKind::ArgList
498 | FoldKind::Consts
499 | FoldKind::Statics => None,
496 }; 500 };
497 501
498 let range = range(line_index, fold.range); 502 let range = range(line_index, fold.range);
diff --git a/docs/dev/README.md b/docs/dev/README.md
index 57162a47d..eab21a765 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -1,7 +1,7 @@
1# Contributing Quick Start 1# Contributing Quick Start
2 2
3Rust Analyzer is an ordinary Rust project, which is organized as a Cargo 3Rust Analyzer is an ordinary Rust project, which is organized as a Cargo workspace, builds on stable and doesn't depend on C libraries.
4workspace, builds on stable and doesn't depend on C libraries. So, just 4So, just
5 5
6``` 6```
7$ cargo test 7$ cargo test
@@ -13,9 +13,8 @@ To learn more about how rust-analyzer works, see [./architecture.md](./architect
13It also explains the high-level layout of the source code. 13It also explains the high-level layout of the source code.
14Do skim through that document. 14Do skim through that document.
15 15
16We also publish rustdoc docs to pages: 16We also publish rustdoc docs to pages: https://rust-analyzer.github.io/rust-analyzer/ide/.
17 17Note though, that internal documentation is very incomplete.
18https://rust-analyzer.github.io/rust-analyzer/ide/
19 18
20Various organizational and process issues are discussed in this document. 19Various organizational and process issues are discussed in this document.
21 20
@@ -49,21 +48,28 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0
49 Also a kind of fun. 48 Also a kind of fun.
50 These issues should generally include a link to a Zulip discussion thread. 49 These issues should generally include a link to a Zulip discussion thread.
51 50
52# CI 51# Code Style & Review Process
52
53Do see [./style.md](./style.md).
53 54
54We use GitHub Actions for CI. Most of the things, including formatting, are checked by 55# Cookbook
55`cargo test` so, if `cargo test` passes locally, that's a good sign that CI will 56
56be green as well. The only exception is that some long-running tests are skipped locally by default. 57## CI
58
59We use GitHub Actions for CI.
60Most of the things, including formatting, are checked by `cargo test`.
61If `cargo test` passes locally, that's a good sign that CI will be green as well.
62The only exception is that some long-running tests are skipped locally by default.
57Use `env RUN_SLOW_TESTS=1 cargo test` to run the full suite. 63Use `env RUN_SLOW_TESTS=1 cargo test` to run the full suite.
58 64
59We use bors-ng to enforce the [not rocket science](https://graydon2.dreamwidth.org/1597.html) rule. 65We use bors-ng to enforce the [not rocket science](https://graydon2.dreamwidth.org/1597.html) rule.
60 66
61# Launching rust-analyzer 67## Launching rust-analyzer
62 68
63Debugging the language server can be tricky. 69Debugging the language server can be tricky.
64LSP is rather chatty, so driving it from the command line is not really feasible, driving it via VS Code requires interacting with two processes. 70LSP is rather chatty, so driving it from the command line is not really feasible, driving it via VS Code requires interacting with two processes.
65 71
66For this reason, the best way to see how rust-analyzer works is to find a relevant test and execute it. 72For this reason, the best way to see how rust-analyzer works is to **find a relevant test and execute it**.
67VS Code & Emacs include an action for running a single test. 73VS Code & Emacs include an action for running a single test.
68 74
69Launching a VS Code instance with a locally built language server is also possible. 75Launching a VS Code instance with a locally built language server is also possible.
@@ -107,12 +113,7 @@ cd editors/code
107npm ci 113npm ci
108npm run lint 114npm run lint
109``` 115```
110 116## How to ...
111# Code Style & Review Process
112
113Do see [./style.md](./style.md).
114
115# How to ...
116 117
117* ... add an assist? [#7535](https://github.com/rust-analyzer/rust-analyzer/pull/7535) 118* ... add an assist? [#7535](https://github.com/rust-analyzer/rust-analyzer/pull/7535)
118* ... add a new protocol extension? [#4569](https://github.com/rust-analyzer/rust-analyzer/pull/4569) 119* ... add a new protocol extension? [#4569](https://github.com/rust-analyzer/rust-analyzer/pull/4569)
@@ -120,33 +121,30 @@ Do see [./style.md](./style.md).
120* ... add a new completion? [#6964](https://github.com/rust-analyzer/rust-analyzer/pull/6964) 121* ... add a new completion? [#6964](https://github.com/rust-analyzer/rust-analyzer/pull/6964)
121* ... allow new syntax in the parser? [#7338](https://github.com/rust-analyzer/rust-analyzer/pull/7338) 122* ... allow new syntax in the parser? [#7338](https://github.com/rust-analyzer/rust-analyzer/pull/7338)
122 123
123# Logging 124## Logging
124 125
125Logging is done by both rust-analyzer and VS Code, so it might be tricky to 126Logging is done by both rust-analyzer and VS Code, so it might be tricky to figure out where logs go.
126figure out where logs go.
127 127
128Inside rust-analyzer, we use the standard `log` crate for logging, and 128Inside rust-analyzer, we use the standard `log` crate for logging, and `env_logger` for logging frontend.
129`env_logger` for logging frontend. By default, log goes to stderr, but the 129By default, log goes to stderr, but the stderr itself is processed by VS Code.
130stderr itself is processed by VS Code. 130`--log-file <PATH>` CLI argument allows logging to file.
131 131
132To see stderr in the running VS Code instance, go to the "Output" tab of the 132To see stderr in the running VS Code instance, go to the "Output" tab of the panel and select `rust-analyzer`.
133panel and select `rust-analyzer`. This shows `eprintln!` as well. Note that 133This shows `eprintln!` as well.
134`stdout` is used for the actual protocol, so `println!` will break things. 134Note that `stdout` is used for the actual protocol, so `println!` will break things.
135 135
136To log all communication between the server and the client, there are two choices: 136To log all communication between the server and the client, there are two choices:
137 137
138* you can log on the server side, by running something like 138* You can log on the server side, by running something like
139 ``` 139 ```
140 env RA_LOG=lsp_server=debug code . 140 env RA_LOG=lsp_server=debug code .
141 ``` 141 ```
142 142* You can log on the client side, by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
143* you can log on the client side, by enabling `"rust-analyzer.trace.server": 143 These logs are shown in a separate tab in the output and could be used with LSP inspector.
144 "verbose"` workspace setting. These logs are shown in a separate tab in the 144 Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
145 output and could be used with LSP inspector. Kudos to
146 [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
147 145
148 146
149There are also two VS Code commands which might be of interest: 147There are also several VS Code commands which might be of interest:
150 148
151* `Rust Analyzer: Status` shows some memory-usage statistics. 149* `Rust Analyzer: Status` shows some memory-usage statistics.
152 150
@@ -164,7 +162,7 @@ There are also two VS Code commands which might be of interest:
164 162
165 ![demo](https://user-images.githubusercontent.com/36276403/78225773-6636a480-74d3-11ea-9d9f-1c9d42da03b0.png) 163 ![demo](https://user-images.githubusercontent.com/36276403/78225773-6636a480-74d3-11ea-9d9f-1c9d42da03b0.png)
166 164
167# Profiling 165## Profiling
168 166
169We have a built-in hierarchical profiler; you can enable it by using the `RA_PROFILE` env-var: 167We have a built-in hierarchical profiler; you can enable it by using the `RA_PROFILE` env-var:
170 168
@@ -192,7 +190,9 @@ $ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight .
192$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0 190$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
193``` 191```
194 192
195# Release Process 193Look for `fn benchmark_xxx` tests for a quick way to reproduce performance problems.
194
195## Release Process
196 196
197Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one. 197Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one.
198 198
@@ -229,7 +229,7 @@ Make sure to remove the new changelog post created when running `cargo xtask rel
229We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week. 229We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week.
230We don't do "patch" releases, unless something truly egregious comes up. 230We don't do "patch" releases, unless something truly egregious comes up.
231 231
232# Permissions 232## Permissions
233 233
234There are three sets of people with extra permissions: 234There are three sets of people with extra permissions:
235 235
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index b1beeb883..e74b287fb 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -254,23 +254,10 @@ let g:LanguageClient_serverCommands = {
254 254
255==== YouCompleteMe 255==== YouCompleteMe
256 256
2571. Install YouCompleteMe by following the instructions 257Install YouCompleteMe by following the instructions
258 https://github.com/ycm-core/lsp-examples#rust-rust-analyzer[here] 258 https://github.com/ycm-core/YouCompleteMe#installation[here].
259 259
2602. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists): 260rust-analyzer is the default in YCM; it should work out of the box.
261+
262[source,vim]
263----
264let g:ycm_language_server =
265\ [
266\ {
267\ 'name': 'rust',
268\ 'cmdline': ['rust-analyzer'],
269\ 'filetypes': ['rust'],
270\ 'project_root_files': ['Cargo.toml']
271\ }
272\ ]
273----
274 261
275==== ALE 262==== ALE
276 263