author     Aleksey Kladov <[email protected]>   2020-12-11 13:50:47 +0000
committer  Aleksey Kladov <[email protected]>   2020-12-11 14:06:21 +0000
commit     4015ff0e0bb34ee81cf99d7be678baaea2211574
tree       4e1e55fce21084fe59aa3413597dcde5f9079361 /crates/hir
parent     15a644d6063aac86a5eb387412b96b824c67ded3
Improve code structure
Make sure that there's only one entry point, analyze, and remove the awkward analyzer2 name
Diffstat (limited to 'crates/hir')
-rw-r--r--  crates/hir/src/semantics.rs        46
-rw-r--r--  crates/hir/src/source_analyzer.rs   2
2 files changed, 23 insertions, 25 deletions
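
For orientation, here is a minimal, self-contained sketch of the pattern this commit applies in semantics.rs: the old analyze/analyze2 pair collapses into analyze and analyze_with_offset, which both forward to a shared analyze_impl taking an Option. The Semantics and Analyzer types below, and the plain u32 file id and offset, are stand-ins invented for illustration, not the real hir types; the actual signatures appear in the diff that follows.

```rust
// Stand-in types for illustration only; just the entry-point shape mirrors
// the commit.
#[derive(Debug)]
struct Analyzer {
    file_id: u32,
    offset: Option<u32>,
}

struct Semantics;

impl Semantics {
    // Single entry point for the common case (no offset).
    fn analyze(&self, file_id: u32) -> Analyzer {
        self.analyze_impl(file_id, None)
    }

    // Offset-aware variant, used where the position matters (scope_at_offset).
    fn analyze_with_offset(&self, file_id: u32, offset: u32) -> Analyzer {
        self.analyze_impl(file_id, Some(offset))
    }

    // Shared implementation: the common setup (find_file, in the real code)
    // now lives in exactly one place instead of at every call site.
    fn analyze_impl(&self, file_id: u32, offset: Option<u32>) -> Analyzer {
        Analyzer { file_id, offset }
    }
}

fn main() {
    let sem = Semantics;
    println!("{:?}", sem.analyze(1));
    println!("{:?}", sem.analyze_with_offset(1, 42));
}
```

Callers that previously built an InFile themselves and then called analyze2 now make a single analyze call and read file_id and resolver off the returned analyzer, as the hunks below show.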
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 4315ad48b..4bd22ed27 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -294,9 +294,8 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
-        let file_id = sa.expand(self.db, macro_call)?;
+        let sa = self.analyze(macro_call.syntax());
+        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.db.parse_or_expand(file_id)?;
         self.cache(node.clone(), file_id);
         Some(node)
@@ -308,9 +307,8 @@ impl<'db> SemanticsImpl<'db> {
         hypothetical_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let macro_call =
-            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call);
         let krate = sa.resolver.krate()?;
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
@@ -326,10 +324,9 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         let _p = profile::span("descend_into_macros");
         let parent = token.parent();
-        let parent = self.find_file(parent);
-        let sa = self.analyze2(parent.as_ref(), None);
+        let sa = self.analyze(&parent);
 
-        let token = successors(Some(parent.with_value(token)), |token| {
+        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
             self.db.check_canceled();
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
@@ -486,15 +483,13 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), None).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze(node);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -504,21 +499,24 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        let src = self.find_file(node.clone());
-        self.analyze2(src.as_ref(), None)
+        self.analyze_impl(node, None)
     }
+    fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+        self.analyze_impl(node, Some(offset))
+    }
+    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+        let _p = profile::span("Semantics::analyze_impl");
+        let node = self.find_file(node.clone());
+        let node = node.as_ref();
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
-        let _p = profile::span("Semantics::analyze2");
-
-        let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
+        let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
         };
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -528,7 +526,7 @@ impl<'db> SemanticsImpl<'db> {
             ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
             ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
-        SourceAnalyzer::new_for_resolver(resolver, src)
+        SourceAnalyzer::new_for_resolver(resolver, node)
     }
 
     fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
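
The remaining change, below, makes SourceAnalyzer::file_id crate-visible. That is what lets the call sites above read the file id straight off the returned analyzer (sa.file_id, as in InFile::new(sa.file_id, macro_call) and in the SemanticsScope construction) instead of threading a separately computed InFile through each caller.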
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 1aef0f33f..bf0c959fe 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -37,7 +37,7 @@ use base_db::CrateId;
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
 pub(crate) struct SourceAnalyzer {
-    file_id: HirFileId,
+    pub(crate) file_id: HirFileId,
     pub(crate) resolver: Resolver,
     body: Option<Arc<Body>>,
     body_source_map: Option<Arc<BodySourceMap>>,