Diffstat (limited to 'crates')
-rw-r--r-- crates/hir_def/src/attr.rs | 12
-rw-r--r-- crates/hir_expand/src/db.rs | 33
-rw-r--r-- crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html | 2
-rw-r--r-- crates/ide/src/syntax_highlighting/tests.rs | 2
-rw-r--r-- crates/rust-analyzer/src/benchmarks.rs | 74
-rw-r--r-- crates/rust-analyzer/src/integrated_benchmarks.rs | 184
-rw-r--r-- crates/rust-analyzer/src/lib.rs | 2
7 files changed, 216 insertions, 93 deletions
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index d9294d93a..0171d8a92 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -484,10 +484,10 @@ impl AttrsWithOwner {
         let mut buf = String::new();
         let mut mapping = Vec::new();
         for (doc, idx) in docs {
-            // str::lines doesn't yield anything for the empty string
             if !doc.is_empty() {
-                for line in doc.split('\n') {
-                    let line = line.trim_end();
+                let mut base_offset = 0;
+                for raw_line in doc.split('\n') {
+                    let line = raw_line.trim_end();
                     let line_len = line.len();
                     let (offset, line) = match line.char_indices().nth(indent) {
                         Some((offset, _)) => (offset, &line[offset..]),
@@ -498,9 +498,13 @@ impl AttrsWithOwner {
                     mapping.push((
                         TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
                         idx,
-                        TextRange::new(offset.try_into().ok()?, line_len.try_into().ok()?),
+                        TextRange::at(
+                            (base_offset + offset).try_into().ok()?,
+                            line_len.try_into().ok()?,
+                        ),
                     ));
                     buf.push('\n');
+                    base_offset += raw_line.len() + 1;
                 }
             } else {
                 buf.push('\n');
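
The key change in the hunks above is that the source-side range for each doc line is now anchored at base_offset + offset, with base_offset advanced by raw_line.len() + 1 for every line, so lines after the first in a multi-line doc attribute map back to their true position instead of a range that restarts inside each line. A minimal sketch of that bookkeeping, with illustrative names and plain usize offsets standing in for TextRange:

// A minimal, self-contained sketch of the range bookkeeping introduced above
// (illustrative function name and return type; not rust-analyzer's actual API).
// `base_offset` tracks where each raw line starts inside the multi-line doc
// string, so each recorded range points at that line's real position instead
// of restarting at offset 0 for every line.
fn doc_line_ranges(doc: &str, indent: usize) -> Vec<(usize, usize)> {
    let mut ranges = Vec::new();
    let mut base_offset = 0;
    for raw_line in doc.split('\n') {
        let line = raw_line.trim_end();
        let line_len = line.len();
        // Skip up to `indent` characters of common indentation, remembering
        // how many bytes were skipped.
        let offset = match line.char_indices().nth(indent) {
            Some((offset, _)) => offset,
            None => 0,
        };
        // (start, len) within `doc`, mirroring the hunk's
        // `TextRange::at(base_offset + offset, line_len)`.
        ranges.push((base_offset + offset, line_len));
        base_offset += raw_line.len() + 1; // + 1 for the '\n' removed by split
    }
    ranges
}
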
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 1e4b0cc19..1389e30db 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -3,14 +3,14 @@
 use std::sync::Arc;
 
 use base_db::{salsa, SourceDatabase};
-use mbe::{ExpandError, ExpandResult, MacroDef, MacroRules};
+use mbe::{ExpandError, ExpandResult};
 use parser::FragmentKind;
 use syntax::{
     algo::diff,
-    ast::{MacroStmts, NameOwner},
+    ast::{self, NameOwner},
     AstNode, GreenNode, Parse,
     SyntaxKind::*,
-    SyntaxNode,
+    SyntaxNode, SyntaxToken,
 };
 
 use crate::{
@@ -27,15 +27,20 @@ const TOKEN_LIMIT: usize = 524288;
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
+    /// Old-style `macro_rules`.
     MacroRules(mbe::MacroRules),
+    /// AKA macros 2.0.
     MacroDef(mbe::MacroDef),
+    /// Stuff like `line!` and `file!`.
     Builtin(BuiltinFnLikeExpander),
+    /// `derive(Copy)` and such.
     BuiltinDerive(BuiltinDeriveExpander),
+    /// The thing we love the most here in rust-analyzer -- procedural macros.
     ProcMacro(ProcMacroExpander),
 }
 
 impl TokenExpander {
-    pub fn expand(
+    fn expand(
         &self,
         db: &dyn AstDatabase,
         id: LazyMacroId,
@@ -56,7 +61,7 @@ impl TokenExpander {
         }
     }
 
-    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+    pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
             TokenExpander::MacroRules(it) => it.map_id_down(id),
             TokenExpander::MacroDef(it) => it.map_id_down(id),
@@ -66,7 +71,7 @@ impl TokenExpander {
         }
     }
 
-    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+    pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
             TokenExpander::MacroRules(it) => it.map_id_up(id),
             TokenExpander::MacroDef(it) => it.map_id_up(id),
@@ -115,9 +120,9 @@ pub trait AstDatabase: SourceDatabase {
 pub fn expand_hypothetical(
     db: &dyn AstDatabase,
     actual_macro_call: MacroCallId,
-    hypothetical_args: &syntax::ast::TokenTree,
-    token_to_map: syntax::SyntaxToken,
-) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
+    hypothetical_args: &ast::TokenTree,
+    token_to_map: SyntaxToken,
+) -> Option<(SyntaxNode, SyntaxToken)> {
     let macro_file = MacroFile { macro_call_id: actual_macro_call };
     let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
     let range =
@@ -141,10 +146,10 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
 fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
-            syntax::ast::Macro::MacroRules(macro_rules) => {
+            ast::Macro::MacroRules(macro_rules) => {
                 let arg = macro_rules.token_tree()?;
                 let (tt, tmap) = mbe::ast_to_token_tree(&arg);
-                let rules = match MacroRules::parse(&tt) {
+                let rules = match mbe::MacroRules::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
                         let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
@@ -154,10 +159,10 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
                 };
                 Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
             }
-            syntax::ast::Macro::MacroDef(macro_def) => {
+            ast::Macro::MacroDef(macro_def) => {
                 let arg = macro_def.body()?;
                 let (tt, tmap) = mbe::ast_to_token_tree(&arg);
-                let rules = match MacroDef::parse(&tt) {
+                let rules = match mbe::MacroDef::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
                         let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
@@ -403,7 +408,7 @@ fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
     if diff(from, to).is_empty() {
         return true;
     }
-    if let Some(stmts) = MacroStmts::cast(from.clone()) {
+    if let Some(stmts) = ast::MacroStmts::cast(from.clone()) {
        if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
            return true;
        }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 638f42c2f..8d83ba206 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -142,6 +142,7 @@ It is beyond me why you'd use these when you got ///
 ```rust
 </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
 ```
+</span><span class="function documentation injected intra_doc_link">[`block_comments2`]</span><span class="comment documentation"> tests these with indentation
 */</span>
 <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
 
@@ -150,5 +151,6 @@ It is beyond me why you'd use these when you got ///
 ```rust
 </span><span class="comment documentation"> </span><span class="none injected"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
 ```
+</span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
 */</span>
 <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 17cc6334b..b6e952b08 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -618,6 +618,7 @@ It is beyond me why you'd use these when you got ///
 ```rust
 let _ = example(&[1, 2, 3]);
 ```
+[`block_comments2`] tests these with indentation
 */
 pub fn block_comments() {}
 
@@ -626,6 +627,7 @@ pub fn block_comments() {}
 ```rust
 let _ = example(&[1, 2, 3]);
 ```
+[`block_comments`] tests these without indentation
 */
 pub fn block_comments2() {}
 "#
diff --git a/crates/rust-analyzer/src/benchmarks.rs b/crates/rust-analyzer/src/benchmarks.rs
deleted file mode 100644
index bdd94b1c4..000000000
--- a/crates/rust-analyzer/src/benchmarks.rs
+++ /dev/null
@@ -1,74 +0,0 @@
-//! Fully integrated benchmarks for rust-analyzer, which load real cargo
-//! projects.
-//!
-//! The benchmark here is used to debug specific performance regressions. If you
-//! notice that, eg, completion is slow in some specific case, you can modify
-//! code here exercise this specific completion, and thus have a fast
-//! edit/compile/test cycle.
-//!
-//! Note that "Rust Analyzer: Run" action does not allow running a single test
-//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
-//! which you can use to paste the command in terminal and add `--release` manually.
-
-use std::sync::Arc;
-
-use ide::Change;
-use test_utils::project_root;
-use vfs::{AbsPathBuf, VfsPath};
-
-use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
-
-#[test]
-fn benchmark_integrated_highlighting() {
-    // Don't run slow benchmark by default
-    if true {
-        return;
-    }
-
-    // Load rust-analyzer itself.
-    let workspace_to_load = project_root();
-    let file = "./crates/ide_db/src/apply_change.rs";
-
-    let cargo_config = Default::default();
-    let load_cargo_config = LoadCargoConfig {
-        load_out_dirs_from_check: true,
-        wrap_rustc: false,
-        with_proc_macro: false,
-    };
-
-    let (mut host, vfs, _proc_macro) = {
-        let _it = stdx::timeit("workspace loading");
-        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
-    };
-
-    let file_id = {
-        let file = workspace_to_load.join(file);
-        let path = VfsPath::from(AbsPathBuf::assert(file));
-        vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
-    };
-
-    {
-        let _it = stdx::timeit("initial");
-        let analysis = host.analysis();
-        analysis.highlight_as_html(file_id, false).unwrap();
-    }
-
-    profile::init_from("*>100");
-    // let _s = profile::heartbeat_span();
-
-    {
-        let _it = stdx::timeit("change");
-        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
-        text.push_str("\npub fn _dummy() {}\n");
-        let mut change = Change::new();
-        change.change_file(file_id, Some(Arc::new(text)));
-        host.apply_change(change);
-    }
-
-    {
-        let _it = stdx::timeit("after change");
-        let _span = profile::cpu_span();
-        let analysis = host.analysis();
-        analysis.highlight_as_html(file_id, false).unwrap();
-    }
-}
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
new file mode 100644
index 000000000..3dcbe397a
--- /dev/null
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -0,0 +1,184 @@
+//! Fully integrated benchmarks for rust-analyzer, which load real cargo
+//! projects.
+//!
+//! The benchmark here is used to debug specific performance regressions. If you
+//! notice that, eg, completion is slow in some specific case, you can modify
+//! code here exercise this specific completion, and thus have a fast
+//! edit/compile/test cycle.
+//!
+//! Note that "Rust Analyzer: Run" action does not allow running a single test
+//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
+//! which you can use to paste the command in terminal and add `--release` manually.
+
+use std::{convert::TryFrom, sync::Arc};
+
+use ide::{Change, CompletionConfig, FilePosition, TextSize};
+use ide_db::helpers::{insert_use::InsertUseConfig, merge_imports::MergeBehavior, SnippetCap};
+use test_utils::project_root;
+use vfs::{AbsPathBuf, VfsPath};
+
+use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
+
+#[test]
+fn integrated_highlighting_benchmark() {
+    if std::env::var("RUN_SLOW_BENCHES").is_err() {
+        return;
+    }
+
+    // Load rust-analyzer itself.
+    let workspace_to_load = project_root();
+    let file = "./crates/ide_db/src/apply_change.rs";
+
+    let cargo_config = Default::default();
+    let load_cargo_config = LoadCargoConfig {
+        load_out_dirs_from_check: true,
+        wrap_rustc: false,
+        with_proc_macro: false,
+    };
+
+    let (mut host, vfs, _proc_macro) = {
+        let _it = stdx::timeit("workspace loading");
+        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+    };
+
+    let file_id = {
+        let file = workspace_to_load.join(file);
+        let path = VfsPath::from(AbsPathBuf::assert(file));
+        vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+    };
+
+    {
+        let _it = stdx::timeit("initial");
+        let analysis = host.analysis();
+        analysis.highlight_as_html(file_id, false).unwrap();
+    }
+
+    profile::init_from("*>100");
+    // let _s = profile::heartbeat_span();
+
+    {
+        let _it = stdx::timeit("change");
+        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+        text.push_str("\npub fn _dummy() {}\n");
+        let mut change = Change::new();
+        change.change_file(file_id, Some(Arc::new(text)));
+        host.apply_change(change);
+    }
+
+    {
+        let _it = stdx::timeit("after change");
+        let _span = profile::cpu_span();
+        let analysis = host.analysis();
+        analysis.highlight_as_html(file_id, false).unwrap();
+    }
+}
+
+#[test]
+fn integrated_completion_benchmark() {
+    if std::env::var("RUN_SLOW_BENCHES").is_err() {
+        return;
+    }
+
+    // Load rust-analyzer itself.
+    let workspace_to_load = project_root();
+    let file = "./crates/hir/src/lib.rs";
+
+    let cargo_config = Default::default();
+    let load_cargo_config = LoadCargoConfig {
+        load_out_dirs_from_check: true,
+        wrap_rustc: false,
+        with_proc_macro: false,
+    };
+
+    let (mut host, vfs, _proc_macro) = {
+        let _it = stdx::timeit("workspace loading");
+        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+    };
+
+    let file_id = {
+        let file = workspace_to_load.join(file);
+        let path = VfsPath::from(AbsPathBuf::assert(file));
+        vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+    };
+
+    {
+        let _it = stdx::timeit("initial");
+        let analysis = host.analysis();
+        analysis.highlight_as_html(file_id, false).unwrap();
+    }
+
+    profile::init_from("*>5");
+    // let _s = profile::heartbeat_span();
+
+    let completion_offset = {
+        let _it = stdx::timeit("change");
+        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+        let completion_offset =
+            patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+                + "sel".len();
+        let mut change = Change::new();
+        change.change_file(file_id, Some(Arc::new(text)));
+        host.apply_change(change);
+        completion_offset
+    };
+
+    {
+        let _it = stdx::timeit("unqualified path completion");
+        let _span = profile::cpu_span();
+        let analysis = host.analysis();
+        let config = CompletionConfig {
+            enable_postfix_completions: true,
+            enable_imports_on_the_fly: true,
+            add_call_parenthesis: true,
+            add_call_argument_snippets: true,
+            snippet_cap: SnippetCap::new(true),
+            insert_use: InsertUseConfig {
+                merge: Some(MergeBehavior::Full),
+                prefix_kind: hir::PrefixKind::ByCrate,
+                group: true,
+            },
+        };
+        let position =
+            FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+        analysis.completions(&config, position).unwrap();
+    }
+
+    let completion_offset = {
+        let _it = stdx::timeit("change");
+        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+        let completion_offset =
+            patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+                + "self.".len();
+        let mut change = Change::new();
+        change.change_file(file_id, Some(Arc::new(text)));
+        host.apply_change(change);
+        completion_offset
+    };
+
+    {
+        let _it = stdx::timeit("dot completion");
+        let _span = profile::cpu_span();
+        let analysis = host.analysis();
+        let config = CompletionConfig {
+            enable_postfix_completions: true,
+            enable_imports_on_the_fly: true,
+            add_call_parenthesis: true,
+            add_call_argument_snippets: true,
+            snippet_cap: SnippetCap::new(true),
+            insert_use: InsertUseConfig {
+                merge: Some(MergeBehavior::Full),
+                prefix_kind: hir::PrefixKind::ByCrate,
+                group: true,
+            },
+        };
+        let position =
+            FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+        analysis.completions(&config, position).unwrap();
+    }
+}
+
+fn patch(what: &mut String, from: &str, to: &str) -> usize {
+    let idx = what.find(from).unwrap();
+    *what = what.replacen(from, to, 1);
+    idx
+}
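
Usage note, derived from the module docs and the guard above: both benchmarks are plain #[test] functions that return early unless the RUN_SLOW_BENCHES environment variable is set, so they only do real work when that variable is exported before running the tests. As the doc comment explains, the command obtained via "Rust Analyzer: Copy Run Command Line" can be pasted into a terminal with `--release` appended by hand, since the "Rust Analyzer: Run" action in VS Code cannot run a single test in release mode.
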
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index d9a5030a0..da7e24bec 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -40,7 +40,7 @@ pub mod lsp_ext;
 pub mod config;
 
 #[cfg(test)]
-mod benchmarks;
+mod integrated_benchmarks;
 
 use serde::de::DeserializeOwned;
 use std::fmt;