-rw-r--r--  crates/hir/src/semantics.rs                     | 46
-rw-r--r--  crates/hir/src/source_analyzer.rs               |  2
-rw-r--r--  crates/ide/src/hover.rs                         | 16
-rw-r--r--  crates/rust-analyzer/src/cli.rs                 | 23
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_bench.rs  |  5
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_stats.rs  |  8
-rw-r--r--  crates/rust-analyzer/src/lib.rs                 | 21
-rw-r--r--  xtask/src/metrics.rs                            |  4
8 files changed, 68 insertions(+), 57 deletions(-)
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 4315ad48b..4bd22ed27 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -294,9 +294,8 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
-        let file_id = sa.expand(self.db, macro_call)?;
+        let sa = self.analyze(macro_call.syntax());
+        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.db.parse_or_expand(file_id)?;
         self.cache(node.clone(), file_id);
         Some(node)
@@ -308,9 +307,8 @@ impl<'db> SemanticsImpl<'db> {
         hypothetical_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let macro_call =
-            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call);
         let krate = sa.resolver.krate()?;
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
@@ -326,10 +324,9 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         let _p = profile::span("descend_into_macros");
         let parent = token.parent();
-        let parent = self.find_file(parent);
-        let sa = self.analyze2(parent.as_ref(), None);
+        let sa = self.analyze(&parent);
 
-        let token = successors(Some(parent.with_value(token)), |token| {
+        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
             self.db.check_canceled();
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
@@ -486,15 +483,13 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), None).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze(node);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -504,21 +499,24 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        let src = self.find_file(node.clone());
-        self.analyze2(src.as_ref(), None)
+        self.analyze_impl(node, None)
     }
+    fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+        self.analyze_impl(node, Some(offset))
+    }
+    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+        let _p = profile::span("Semantics::analyze_impl");
+        let node = self.find_file(node.clone());
+        let node = node.as_ref();
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
-        let _p = profile::span("Semantics::analyze2");
-
-        let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
+        let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
         };
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -528,7 +526,7 @@ impl<'db> SemanticsImpl<'db> {
             ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
             ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
-        SourceAnalyzer::new_for_resolver(resolver, src)
+        SourceAnalyzer::new_for_resolver(resolver, node)
     }
 
     fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
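Note on the semantics.rs hunks above: `analyze2` is folded into `analyze_impl`, callers now pass a plain `&SyntaxNode` to `analyze`/`analyze_with_offset`, and the `find_file`/`InFile` plumbing plus the `file_id` live inside the returned `SourceAnalyzer`. A minimal sketch of the new caller shape, using a hypothetical helper that is not part of this diff:

// Hypothetical helper inside SemanticsImpl (sketch only), illustrating the refactored API.
fn krate_of(&self, node: &SyntaxNode) -> Option<CrateId> {
    // No manual find_file/InFile wrapping; analyze_impl does that internally.
    let sa = self.analyze(node);
    // file_id is now readable off SourceAnalyzer (made pub(crate) in the next file).
    let _file = sa.file_id;
    sa.resolver.krate()
}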
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 1aef0f33f..bf0c959fe 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -37,7 +37,7 @@ use base_db::CrateId;
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
 pub(crate) struct SourceAnalyzer {
-    file_id: HirFileId,
+    pub(crate) file_id: HirFileId,
     pub(crate) resolver: Resolver,
     body: Option<Arc<Body>>,
     body_source_map: Option<Arc<BodySourceMap>>,
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index cf04c3de0..ab017d2ad 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -139,6 +139,11 @@ pub(crate) fn hover(
         }
     }
 
+    if token.kind() == syntax::SyntaxKind::COMMENT {
+        // don't highlight the entire parent node on comment hover
+        return None;
+    }
+
     let node = token.ancestors().find(|n| {
         ast::Expr::can_cast(n.kind())
             || ast::Pat::can_cast(n.kind())
@@ -3419,4 +3424,15 @@ mod Foo<|> {
             "#]],
         );
     }
+
+    #[test]
+    fn hover_comments_dont_highlight_parent() {
+        check_hover_no_result(
+            r#"
+fn no_hover() {
+    // no<|>hover
+}
+"#,
+        );
+    }
 }
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs
index 6966ee576..6879a462d 100644
--- a/crates/rust-analyzer/src/cli.rs
+++ b/crates/rust-analyzer/src/cli.rs
@@ -10,8 +10,9 @@ mod ssr;
 use std::io::Read;
 
 use anyhow::Result;
-use ide::Analysis;
+use ide::{Analysis, AnalysisHost};
 use syntax::{AstNode, SourceFile};
+use vfs::Vfs;
 
 pub use self::{
     analysis_bench::{BenchCmd, BenchWhat, Position},
@@ -82,3 +83,23 @@ fn report_metric(metric: &str, value: u64, unit: &str) {
     }
     println!("METRIC:{}:{}:{}", metric, value, unit)
 }
+
+fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
+    let mut mem = host.per_query_memory_usage();
+
+    let before = profile::memory_usage();
+    drop(vfs);
+    let vfs = before.allocated - profile::memory_usage().allocated;
+    mem.push(("VFS".into(), vfs));
+
+    let before = profile::memory_usage();
+    drop(host);
+    mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
+
+    mem.push(("Remaining".into(), profile::memory_usage().allocated));
+
+    for (name, bytes) in mem {
+        // NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
+        eprintln!("{:>8} {}", bytes, name);
+    }
+}
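The function moved into cli.rs above attributes memory by sampling the global allocation counter before and after dropping each value. The same trick in generic form, as a sketch only (`label_drop` is a hypothetical helper; the element type of the accumulator is assumed to match what `per_query_memory_usage` returns):

// Sketch: charge whatever dropping `value` releases to `label`.
fn label_drop<T>(label: &str, value: T, acc: &mut Vec<(String, profile::Bytes)>) {
    let before = profile::memory_usage();
    drop(value);
    acc.push((label.into(), before.allocated - profile::memory_usage().allocated));
}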
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs
index 8e33986d5..5a8484c62 100644
--- a/crates/rust-analyzer/src/cli/analysis_bench.rs
+++ b/crates/rust-analyzer/src/cli/analysis_bench.rs
@@ -12,10 +12,7 @@ use ide_db::base_db::{
 };
 use vfs::AbsPathBuf;
 
-use crate::{
-    cli::{load_cargo::load_cargo, Verbosity},
-    print_memory_usage,
-};
+use crate::cli::{load_cargo::load_cargo, print_memory_usage, Verbosity};
 
 pub struct BenchCmd {
     pub path: PathBuf,
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 58d284d47..a23fb7a33 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -23,11 +23,9 @@ use rustc_hash::FxHashSet;
 use stdx::format_to;
 use syntax::AstNode;
 
-use crate::{
-    cli::{
-        load_cargo::load_cargo, progress_report::ProgressReport, report_metric, Result, Verbosity,
-    },
-    print_memory_usage,
-};
+use crate::cli::{
+    load_cargo::load_cargo, print_memory_usage, progress_report::ProgressReport, report_metric,
+    Result, Verbosity,
+};
 use profile::StopWatch;
 
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index ad08f1afb..79fe30e53 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -37,10 +37,8 @@ mod document;
 pub mod lsp_ext;
 pub mod config;
 
-use ide::AnalysisHost;
 use serde::de::DeserializeOwned;
 use std::fmt;
-use vfs::Vfs;
 
 pub use crate::{caps::server_capabilities, main_loop::main_loop};
 
@@ -72,22 +70,3 @@ impl fmt::Display for LspError {
 }
 
 impl std::error::Error for LspError {}
-
-fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
-    let mut mem = host.per_query_memory_usage();
-
-    let before = profile::memory_usage();
-    drop(vfs);
-    let vfs = before.allocated - profile::memory_usage().allocated;
-    mem.push(("VFS".into(), vfs));
-
-    let before = profile::memory_usage();
-    drop(host);
-    mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
-
-    mem.push(("Remaining".into(), profile::memory_usage().allocated));
-
-    for (name, bytes) in mem {
-        eprintln!("{:>8} {}", bytes, name);
-    }
-}
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index e0d1aaf97..624ad3b7e 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -81,7 +81,9 @@ impl Metrics {
     }
     fn measure_analysis_stats_path(&mut self, name: &str, path: &str) -> Result<()> {
         eprintln!("\nMeasuring analysis-stats/{}", name);
-        let output = cmd!("./target/release/rust-analyzer analysis-stats --quiet {path}").read()?;
+        let output =
+            cmd!("./target/release/rust-analyzer analysis-stats --quiet --memory-usage {path}")
+                .read()?;
         for (metric, value, unit) in parse_metrics(&output) {
             self.report(&format!("analysis-stats/{}/{}", name, metric), value, unit.into());
         }
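With `--memory-usage` passed above, analysis-stats emits extra `METRIC:<name>:<value>:<unit>` lines via `report_metric`, which the xtask side consumes through `parse_metrics`. A minimal parsing sketch for that line format (illustrative only; the real `parse_metrics` in xtask may differ):

// Sketch: pull (name, value, unit) triples out of METRIC lines.
fn parse_metrics(output: &str) -> Vec<(&str, u64, &str)> {
    output
        .lines()
        .filter_map(|line| {
            let mut fields = line.strip_prefix("METRIC:")?.split(':');
            let name = fields.next()?;
            let value = fields.next()?.parse::<u64>().ok()?;
            let unit = fields.next()?;
            Some((name, value, unit))
        })
        .collect()
}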