Diffstat (limited to 'crates/ra_hir')
-rw-r--r--  crates/ra_hir/src/semantics.rs       | 18
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs | 18
2 files changed, 18 insertions, 18 deletions
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 5d6edc45c..e09cf3185 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -14,7 +14,7 @@ use ra_db::{FileId, FileRange};
 use ra_prof::profile;
 use ra_syntax::{
     algo::{find_node_at_offset, skip_trivia_token},
-    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
 use rustc_hash::{FxHashMap, FxHashSet};
 
@@ -95,7 +95,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         let token = successors(Some(parent.with_value(token)), |token| {
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
-            if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
+            if !tt.syntax().text_range().contains_range(token.value.text_range()) {
                 return None;
             }
             let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
@@ -114,7 +114,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn descend_node_at_offset<N: ast::AstNode>(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> Option<N> {
         // Handle macro token cases
         node.token_at_offset(offset)
@@ -142,7 +142,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn ancestors_at_offset_with_macros(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> impl Iterator<Item = SyntaxNode> + '_ {
         node.token_at_offset(offset)
             .map(|token| self.ancestors_with_macros(token.parent()))
@@ -154,7 +154,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn find_node_at_offset_with_macros<N: AstNode>(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> Option<N> {
         self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
     }
@@ -164,7 +164,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn find_node_at_offset_with_descend<N: AstNode>(
         &self,
         node: &SyntaxNode,
-        offset: TextUnit,
+        offset: TextSize,
     ) -> Option<N> {
         if let Some(it) = find_node_at_offset(&node, offset) {
             return Some(it);
@@ -255,7 +255,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         SemanticsScope { db: self.db, resolver }
     }
 
-    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db, DB> {
         let node = self.find_file(node.clone());
         let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
         SemanticsScope { db: self.db, resolver }
@@ -271,7 +271,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.analyze2(src.as_ref(), None)
     }
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
+    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile("Semantics::analyze2");
 
         let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
@@ -463,7 +463,7 @@ fn original_range_opt(
             return None;
         }
 
-        Some(first.with_value(first.value.text_range().extend_to(&last.value.text_range())))
+        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
     })?)
 }
 
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 0ed6d0958..59a3a17d2 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -23,7 +23,7 @@ use hir_ty::{
 };
 use ra_syntax::{
     ast::{self, AstNode},
-    SyntaxNode, TextRange, TextUnit,
+    SyntaxNode, TextRange, TextSize,
 };
 
 use crate::{
@@ -50,7 +50,7 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         def: DefWithBodyId,
         node: InFile<&SyntaxNode>,
-        offset: Option<TextUnit>,
+        offset: Option<TextSize>,
     ) -> SourceAnalyzer {
         let (body, source_map) = db.body_with_source_map(def);
         let scopes = db.expr_scopes(def);
@@ -318,7 +318,7 @@ fn scope_for_offset(
     db: &dyn HirDatabase,
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
-    offset: InFile<TextUnit>,
+    offset: InFile<TextSize>,
 ) -> Option<ScopeId> {
     scopes
         .scope_by_expr()
@@ -354,7 +354,7 @@ fn adjust(
     source_map: &BodySourceMap,
     expr_range: TextRange,
     file_id: HirFileId,
-    offset: TextUnit,
+    offset: TextSize,
 ) -> Option<ScopeId> {
     let child_scopes = scopes
         .scope_by_expr()
@@ -369,15 +369,15 @@ fn adjust(
             let node = source.value.to_node(&root);
             Some((node.syntax().text_range(), scope))
         })
-        .filter(|(range, _)| {
-            range.start() <= offset && range.is_subrange(&expr_range) && *range != expr_range
+        .filter(|&(range, _)| {
+            range.start() <= offset && expr_range.contains_range(range) && range != expr_range
         });
 
     child_scopes
-        .max_by(|(r1, _), (r2, _)| {
-            if r2.is_subrange(&r1) {
+        .max_by(|&(r1, _), &(r2, _)| {
+            if r1.contains_range(r2) {
                 std::cmp::Ordering::Greater
-            } else if r1.is_subrange(&r2) {
+            } else if r2.contains_range(r1) {
                 std::cmp::Ordering::Less
             } else {
                 r1.start().cmp(&r2.start())
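For context, the reshuffled comparator at the end of adjust keeps its old behaviour: a range that contains the other compares as greater, and non-nested ranges fall back to ordering by start offset. Below is a small standalone sketch of that ordering, again assuming the text-size crate and using made-up range values.

use std::cmp::Ordering;
use text_size::{TextRange, TextSize};

// Mirrors the `max_by` comparator above: containment wins, otherwise the
// later-starting range is considered greater.
fn by_containment(r1: TextRange, r2: TextRange) -> Ordering {
    if r1.contains_range(r2) {
        Ordering::Greater
    } else if r2.contains_range(r1) {
        Ordering::Less
    } else {
        r1.start().cmp(&r2.start())
    }
}

fn main() {
    let outer = TextRange::new(TextSize::from(0u32), TextSize::from(20u32));
    let inner = TextRange::new(TextSize::from(5u32), TextSize::from(10u32));

    assert_eq!(by_containment(outer, inner), Ordering::Greater);
    assert_eq!(by_containment(inner, outer), Ordering::Less);

    // Because the containing range ranks greater, `max_by` selects it.
    let best = [inner, outer].into_iter().max_by(|&a, &b| by_containment(a, b));
    assert_eq!(best, Some(outer));
}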