author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2021-03-16 13:14:48 +0000
committer  GitHub <[email protected]>                       2021-03-16 13:14:48 +0000
commit     c49b5b7468a9954af86fd1724276261f396aba5d (patch)
tree       f9b9126cd0cc9a2829de3cdb20f681b354fbe67b /crates/hir/src/semantics.rs
parent     1a82af3527e476d52410ff4dfd2fb4c57466abcb (diff)
parent     f5a81ec4683613bd62624811733345d627f2127b (diff)
Merge #7498
7498: Clone for update r=matklad a=matklad
rowan counterpart https://github.com/rust-analyzer/rowan/pull/93
#6857
Co-authored-by: Aleksey Kladov <[email protected]>
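Judging from the hunks below, the rowan change this merge tracks makes SyntaxToken::parent() return Option<SyntaxNode> instead of a plain SyntaxNode, so every caller has to decide what to do with a detached token. The following is a minimal, self-contained sketch of that caller-side pattern; Token and Node are hypothetical stand-ins, not the real rowan API.

    // Hypothetical stand-ins for rowan's SyntaxToken / SyntaxNode, only to
    // illustrate the shape of the change; this is not the real API.
    #[derive(Clone, Debug)]
    struct Node(&'static str);

    struct Token {
        parent: Option<Node>,
    }

    impl Token {
        // Previously an accessor like this returned Node; after the change it
        // returns Option<Node>, so callers must handle a missing parent.
        fn parent(&self) -> Option<Node> {
            self.parent.clone()
        }
    }

    // The pattern used in descend_into_macros below: fall back when detached.
    fn describe(token: &Token) -> String {
        match token.parent() {
            Some(parent) => format!("parent: {:?}", parent),
            None => "token has no parent".to_string(),
        }
    }

    fn main() {
        let attached = Token { parent: Some(Node("FN")) };
        let detached = Token { parent: None };
        println!("{}", describe(&attached));
        println!("{}", describe(&detached));
    }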
Diffstat (limited to 'crates/hir/src/semantics.rs')
-rw-r--r--  crates/hir/src/semantics.rs  30
1 file changed, 24 insertions(+), 6 deletions(-)
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 519339c0c..03c9371b5 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -143,6 +143,12 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.diagnostics_display_range(diagnostics)
     }
 
+    pub fn token_ancestors_with_macros(
+        &self,
+        token: SyntaxToken,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
+    }
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         self.imp.ancestors_with_macros(node)
     }
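The new token_ancestors_with_macros helper uses Option::into_iter() to turn a possibly missing parent into a zero-or-one element iterator, so a detached token simply yields no ancestors instead of panicking. A small self-contained illustration of that idiom, with plain integers standing in for syntax nodes and the chain n, n/2, ... standing in for the ancestor chain:

    use std::iter::successors;

    // Option::into_iter() yields zero or one item; flat_map then expands that
    // single starting point into its whole "ancestor" chain.
    fn ancestors_from(start: Option<u32>) -> impl Iterator<Item = u32> {
        start
            .into_iter()
            .flat_map(|n| successors(Some(n), |&n| if n > 1 { Some(n / 2) } else { None }))
    }

    fn main() {
        // With a starting point: the full chain.
        assert_eq!(ancestors_from(Some(8)).collect::<Vec<_>>(), vec![8, 4, 2, 1]);
        // Without one (a "detached token"): an empty iterator, no unwrap needed.
        assert_eq!(ancestors_from(None).count(), 0);
    }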
@@ -270,8 +276,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.scope(node)
     }
 
-    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        self.imp.scope_at_offset(node, offset)
+    pub fn scope_at_offset(&self, token: &SyntaxToken, offset: TextSize) -> SemanticsScope<'db> {
+        self.imp.scope_at_offset(&token.parent().unwrap(), offset)
     }
 
     pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -341,7 +347,10 @@ impl<'db> SemanticsImpl<'db> {
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
        let _p = profile::span("descend_into_macros");
-        let parent = token.parent();
+        let parent = match token.parent() {
+            Some(it) => it,
+            None => return token,
+        };
         let sa = self.analyze(&parent);
 
         let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
@@ -360,7 +369,9 @@ impl<'db> SemanticsImpl<'db> {
                 .as_ref()?
                 .map_token_down(token.as_ref())?;
 
-            self.cache(find_root(&token.value.parent()), token.file_id);
+            if let Some(parent) = token.value.parent() {
+                self.cache(find_root(&parent), token.file_id);
+            }
 
             Some(token)
         })
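For context, the descent itself is driven by std::iter::successors (visible in the unchanged lines above): the closure tries to map the token one level further down into a macro expansion, and the iteration stops at the first None. A self-contained sketch of that control flow, with the expansion step faked as a lookup table; none of these names come from rust-analyzer.

    use std::collections::HashMap;
    use std::iter::successors;

    fn main() {
        // Fake "map this token one expansion level down" step: id -> id.
        let map_down: HashMap<u32, u32> = HashMap::from([(1, 10), (10, 100)]);

        // successors keeps applying the step until it returns None, so the
        // chain ends at the deepest expansion that still maps somewhere.
        let chain: Vec<u32> = successors(Some(1u32), |t| map_down.get(t).copied()).collect();

        assert_eq!(chain, vec![1, 10, 100]);
    }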
@@ -378,7 +389,7 @@ impl<'db> SemanticsImpl<'db> {
         // Handle macro token cases
         node.token_at_offset(offset)
             .map(|token| self.descend_into_macros(token))
-            .map(|it| self.ancestors_with_macros(it.parent()))
+            .map(|it| self.token_ancestors_with_macros(it))
             .flatten()
     }
 
@@ -394,6 +405,13 @@ impl<'db> SemanticsImpl<'db> {
         src.with_value(&node).original_file_range(self.db.upcast())
     }
 
+    fn token_ancestors_with_macros(
+        &self,
+        token: SyntaxToken,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
+    }
+
     fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         let node = self.find_file(node);
         node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
@@ -405,7 +423,7 @@ impl<'db> SemanticsImpl<'db> {
         offset: TextSize,
     ) -> impl Iterator<Item = SyntaxNode> + '_ {
         node.token_at_offset(offset)
-            .map(|token| self.ancestors_with_macros(token.parent()))
+            .map(|token| self.token_ancestors_with_macros(token))
             .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
     }
 
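In the last hunk, each token found at the offset contributes its own ancestor stream, already ordered by ascending text-range length, and kmerge_by (which here appears to come from the itertools crate) merges those streams while keeping the smallest range first. A self-contained sketch of that merge, with plain integers standing in for range lengths:

    use itertools::Itertools;

    fn main() {
        // Two sorted streams, like the ancestor chains of the tokens found at
        // an offset, each ordered by ascending text-range length.
        let left = vec![1, 4, 9];
        let right = vec![2, 3, 10];

        // kmerge_by merges k sorted iterators into one, using the closure to
        // decide which head element should come first (here, the smaller one).
        let merged: Vec<i32> = vec![left, right]
            .into_iter()
            .kmerge_by(|a, b| a < b)
            .collect();

        assert_eq!(merged, vec![1, 2, 3, 4, 9, 10]);
    }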