Diffstat (limited to 'crates/ra_hir/src/semantics.rs')
 crates/ra_hir/src/semantics.rs | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 5d6edc45c..e09cf3185 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -14,7 +14,7 @@ use ra_db::{FileId, FileRange};
 use ra_prof::profile;
 use ra_syntax::{
     algo::{find_node_at_offset, skip_trivia_token},
-    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 
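Note: the only change in this hunk is the imported offset type, `TextUnit` becoming `TextSize`, the equivalent offset/length type from the `text-size` crate (assumed here to be what `ra_syntax` re-exports). A minimal sketch of the replacement type, assuming a direct dependency on `text-size`:

    use text_size::{TextRange, TextSize};

    fn main() {
        // TextSize fills the role TextUnit used to: a byte offset or length.
        let offset = TextSize::from(4u32);
        let len = TextSize::of("hello");
        let range = TextRange::new(offset, offset + len);
        assert_eq!(u32::from(range.len()), 5);
    }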
@@ -95,7 +95,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         let token = successors(Some(parent.with_value(token)), |token| {
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
-            if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
+            if !tt.syntax().text_range().contains_range(token.value.text_range()) {
                 return None;
             }
             let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
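Note: besides the rename, this hunk flips receiver and argument. Where the old code asked `token_range.is_subrange(&tt_range)`, the new code asks `tt_range.contains_range(token_range)`. A minimal sketch of the new call, assuming the `text-size` crate:

    use text_size::{TextRange, TextSize};

    fn main() {
        let tt_range = TextRange::new(TextSize::from(10u32), TextSize::from(50u32));
        let token_range = TextRange::new(TextSize::from(20u32), TextSize::from(25u32));

        // The enclosing range is now the receiver; the candidate subrange is the argument.
        assert!(tt_range.contains_range(token_range));
    }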
@@ -114,7 +114,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn descend_node_at_offset<N: ast::AstNode>(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> Option<N> {
         // Handle macro token cases
         node.token_at_offset(offset)
@@ -142,7 +142,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn ancestors_at_offset_with_macros(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> impl Iterator<Item = SyntaxNode> + '_ {
         node.token_at_offset(offset)
             .map(|token| self.ancestors_with_macros(token.parent()))
@@ -154,7 +154,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn find_node_at_offset_with_macros<N: AstNode>(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> Option<N> {
         self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
     }
@@ -164,7 +164,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn find_node_at_offset_with_descend<N: AstNode>(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> Option<N> {
         if let Some(it) = find_node_at_offset(&node, offset) {
             return Some(it);
@@ -255,7 +255,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         SemanticsScope { db: self.db, resolver }
     }
 
-    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db, DB> {
         let node = self.find_file(node.clone());
         let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
         SemanticsScope { db: self.db, resolver }
@@ -271,7 +271,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.analyze2(src.as_ref(), None)
     }
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
+    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile("Semantics::analyze2");
 
         let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
@@ -463,7 +463,7 @@ fn original_range_opt(
             return None;
         }
 
-        Some(first.with_value(first.value.text_range().extend_to(&last.value.text_range())))
+        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
     })?)
 }
 
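Note: in this last hunk `extend_to` becomes `cover`; the receiver stays the same and, judging from the diff, the argument changes from a reference to a value. Both names denote the smallest range containing the two inputs. A minimal sketch, assuming the `text-size` crate:

    use text_size::{TextRange, TextSize};

    fn main() {
        let first = TextRange::new(TextSize::from(0u32), TextSize::from(4u32));
        let last = TextRange::new(TextSize::from(10u32), TextSize::from(12u32));

        // `cover` returns the smallest range containing both inputs.
        let joined = first.cover(last);
        assert_eq!(joined, TextRange::new(TextSize::from(0u32), TextSize::from(12u32)));
    }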