Diffstat (limited to 'crates/syntax/src')
 crates/syntax/src/algo.rs              | 6
 crates/syntax/src/parsing/reparsing.rs | 5
 2 files changed, 3 insertions, 8 deletions
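In short, the patch below removes the thin `find_covering_element` wrapper from `algo.rs` and updates its remaining callers, both in `algo.rs` itself and in `parsing/reparsing.rs`, to call `SyntaxNode::covering_element` directly.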
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 384d031e7..1456270d0 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -45,7 +45,7 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextSize) ->
 }
 
 pub fn find_node_at_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
-    find_covering_element(syntax, range).ancestors().find_map(N::cast)
+    syntax.covering_element(range).ancestors().find_map(N::cast)
 }
 
 /// Skip to next non `trivia` token
@@ -74,10 +74,6 @@ pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Optio
     }
 }
 
-pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
-    root.covering_element(range)
-}
-
 pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
     if u == v {
         return Some(u.clone());
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 78eaf3410..76f01084c 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -10,7 +10,6 @@ use parser::Reparser;
 use text_edit::Indel;
 
 use crate::{
-    algo,
     parsing::{
         lexer::{lex_single_syntax_kind, tokenize, Token},
         text_token_source::TextTokenSource,
@@ -41,7 +40,7 @@ fn reparse_token<'node>(
     root: &'node SyntaxNode,
     edit: &Indel,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
+    let prev_token = root.covering_element(edit.delete).as_token()?.clone();
     let prev_token_kind = prev_token.kind();
     match prev_token_kind {
         WHITESPACE | COMMENT | IDENT | STRING => {
@@ -124,7 +123,7 @@ fn is_contextual_kw(text: &str) -> bool {
 }
 
 fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
-    let node = algo::find_covering_element(node, range);
+    let node = node.covering_element(range);
 
     let mut ancestors = match node {
         NodeOrToken::Token(it) => it.parent().ancestors(),
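For context, here is a minimal sketch (not part of the patch) of how a caller can find the smallest AST node covering a text range after this change, using `SyntaxNode::covering_element` directly as the updated call sites now do. It assumes the `syntax` crate's public API (`SourceFile::parse`, `AstNode`, `TextRange`, `TextSize`); the helper name and the choice of `ast::Fn` are illustrative only.

// Minimal sketch, assuming the public API of the `syntax` crate.
// The helper name `fn_covering_range` and the use of `ast::Fn` are
// illustrative assumptions, not code from the repository.
use syntax::{ast, AstNode, SourceFile, TextRange, TextSize};

fn fn_covering_range(text: &str, range: TextRange) -> Option<ast::Fn> {
    let file: SourceFile = SourceFile::parse(text).tree();
    // Before this commit: algo::find_covering_element(file.syntax(), range)
    file.syntax()
        .covering_element(range) // smallest SyntaxElement covering `range`
        .ancestors()             // walk up towards the root
        .find_map(ast::Fn::cast) // first ancestor that is an ast::Fn
}

fn main() {
    let src = "fn main() { let x = 1; }";
    // Range of the `x` identifier inside the function body.
    let range = TextRange::new(TextSize::from(16), TextSize::from(17));
    assert!(fn_covering_range(src, range).is_some());
}

Since the removed wrapper only delegated to `covering_element`, dropping it shortens the call sites without changing behavior.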