author     Aleksey Kladov <[email protected]>  2021-01-30 15:19:21 +0000
committer  Aleksey Kladov <[email protected]>  2021-03-16 13:10:49 +0000
commit     f5a81ec4683613bd62624811733345d627f2127b (patch)
tree       54490888591ddc005d510695787308b78739ef05 /crates/ide/src
parent     62ec04bbd53ba50e21a7b8f23d46958d322640eb (diff)
Upgrade rowan
Notably, new rowan comes with support for mutable syntax trees.
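
Most of the churn in `crates/ide/src` below comes from one API change visible in the new rowan: `SyntaxToken::parent()` now returns `Option<SyntaxNode>` rather than a plain `SyntaxNode`. Call sites therefore either propagate `None` with `?`, call `.unwrap()` where a parent is guaranteed, or walk ancestors directly from the token. A minimal sketch of those patterns, assuming the `syntax` crate re-exports used throughout this diff (`find_enclosing_fn` is a hypothetical helper, not part of the commit):

```rust
use syntax::{ast, AstNode, SyntaxToken};

// Hypothetical helper illustrating the migration patterns applied below.
fn find_enclosing_fn(token: &SyntaxToken) -> Option<ast::Fn> {
    // With the upgraded rowan, parent() is an Option, so the detached-token
    // case must be handled, e.g. by propagating None with `?` ...
    let _parent = token.parent()?;
    // ... or by walking ancestors straight from the token, as the new code in
    // call_hierarchy.rs and runnables.rs does.
    token.ancestors().find_map(ast::Fn::cast)
}
```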
Diffstat (limited to 'crates/ide/src')
-rw-r--r--   crates/ide/src/call_hierarchy.rs               11
-rw-r--r--   crates/ide/src/doc_links.rs                     2
-rw-r--r--   crates/ide/src/extend_selection.rs             13
-rw-r--r--   crates/ide/src/goto_definition.rs               4
-rw-r--r--   crates/ide/src/goto_type_definition.rs          2
-rw-r--r--   crates/ide/src/hover.rs                         4
-rw-r--r--   crates/ide/src/join_lines.rs                   44
-rw-r--r--   crates/ide/src/matching_brace.rs                2
-rw-r--r--   crates/ide/src/references.rs                    7
-rw-r--r--   crates/ide/src/runnables.rs                     3
-rw-r--r--   crates/ide/src/syntax_highlighting.rs          19
-rw-r--r--   crates/ide/src/syntax_highlighting/format.rs    2
-rw-r--r--   crates/ide/src/syntax_tree.rs                   2
-rw-r--r--   crates/ide/src/typing.rs                        2
14 files changed, 65 insertions, 52 deletions
diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index b848945d7..96021f677 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -53,10 +53,8 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
         for (r_range, _) in references {
             let token = file.token_at_offset(r_range.start()).next()?;
             let token = sema.descend_into_macros(token);
-            let syntax = token.parent();
-
             // This target is the containing function
-            if let Some(nav) = syntax.ancestors().find_map(|node| {
+            if let Some(nav) = token.ancestors().find_map(|node| {
                 let fn_ = ast::Fn::cast(node)?;
                 let def = sema.to_def(&fn_)?;
                 def.try_to_nav(sema.db)
@@ -77,12 +75,13 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
     let file = file.syntax();
     let token = file.token_at_offset(position.offset).next()?;
     let token = sema.descend_into_macros(token);
-    let syntax = token.parent();
 
     let mut calls = CallLocations::default();
 
-    syntax
-        .descendants()
+    token
+        .parent()
+        .into_iter()
+        .flat_map(|it| it.descendants())
         .filter_map(|node| FnCallNode::with_node_exact(&node))
         .filter_map(|call_node| {
             let name_ref = call_node.name_ref()?;
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 7bdd3cca3..461e11060 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -279,7 +279,7 @@ pub(crate) fn external_docs(
     let token = pick_best(file.token_at_offset(position.offset))?;
     let token = sema.descend_into_macros(token);
 
-    let node = token.parent();
+    let node = token.parent()?;
     let definition = match_ast! {
         match node {
             ast::NameRef(name_ref) => NameRefClass::classify(&sema, &name_ref).map(|d| d.referenced(sema.db)),
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index b540d04fe..e187243cb 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -88,7 +88,7 @@ fn try_extend_selection(
                     return Some(range);
                 }
             }
-            token.parent()
+            token.parent()?
         }
         NodeOrToken::Node(node) => node,
     };
@@ -142,7 +142,8 @@ fn extend_tokens_from_range(
     let extended = {
         let fst_expanded = sema.descend_into_macros(first_token.clone());
         let lst_expanded = sema.descend_into_macros(last_token.clone());
-        let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
+        let mut lca =
+            algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
         lca = shallowest_node(&lca);
         if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
             lca = lca.parent()?;
@@ -151,9 +152,13 @@ fn extend_tokens_from_range(
     };
 
     // Compute parent node range
-    let validate = |token: &SyntaxToken| {
+    let validate = |token: &SyntaxToken| -> bool {
         let expanded = sema.descend_into_macros(token.clone());
-        algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
+        let parent = match expanded.parent() {
+            Some(it) => it,
+            None => return false,
+        };
+        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
     };
 
     // Find the first and last text range under expanded parent
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index e8f31e4b1..6986477a5 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -30,7 +30,7 @@ pub(crate) fn goto_definition(
     let file = sema.parse(position.file_id).syntax().clone();
     let original_token = pick_best(file.token_at_offset(position.offset))?;
     let token = sema.descend_into_macros(original_token.clone());
-    let parent = token.parent();
+    let parent = token.parent()?;
     if let Some(comment) = ast::Comment::cast(token) {
         let nav = def_for_doc_comment(&sema, position, &comment)?.try_to_nav(db)?;
         return Some(RangeInfo::new(original_token.text_range(), vec![nav]));
@@ -63,7 +63,7 @@ fn def_for_doc_comment(
     position: FilePosition,
     doc_comment: &ast::Comment,
 ) -> Option<hir::ModuleDef> {
-    let parent = doc_comment.syntax().parent();
+    let parent = doc_comment.syntax().parent()?;
     let (link, ns) = extract_positioned_link_from_comment(position, doc_comment)?;
 
     let def = doc_owner_to_def(sema, parent)?;
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index 369a59820..2d38cb112 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -22,7 +22,7 @@ pub(crate) fn goto_type_definition(
     let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
     let token: SyntaxToken = sema.descend_into_macros(token);
 
-    let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
+    let (ty, node) = sema.token_ancestors_with_macros(token).find_map(|node| {
         let ty = match_ast! {
             match node {
                 ast::Expr(it) => sema.type_of_expr(&it)?,
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index ea45086ce..6215df6bd 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -92,7 +92,7 @@ pub(crate) fn hover(
 
     let mut res = HoverResult::default();
 
-    let node = token.parent();
+    let node = token.parent()?;
     let definition = match_ast! {
         match node {
             // we don't use NameClass::referenced_or_defined here as we do not want to resolve
@@ -438,7 +438,7 @@ fn hover_for_keyword(
     if !token.kind().is_keyword() {
         return None;
     }
-    let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()).krate());
+    let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate());
     // std exposes {}_keyword modules with docstrings on the root to document keywords
     let keyword_mod = format!("{}_keyword", token.text());
     let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index d571ed559..4b25135cd 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -32,29 +32,35 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
         range
     };
 
-    let node = match file.syntax().covering_element(range) {
-        NodeOrToken::Node(node) => node,
-        NodeOrToken::Token(token) => token.parent(),
-    };
     let mut edit = TextEdit::builder();
-    for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
-        let range = match range.intersect(token.text_range()) {
-            Some(range) => range,
-            None => continue,
-        } - token.text_range().start();
-        let text = token.text();
-        for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
-            let pos: TextSize = (pos as u32).into();
-            let offset = token.text_range().start() + range.start() + pos;
-            if !edit.invalidates_offset(offset) {
-                remove_newline(&mut edit, &token, offset);
+    match file.syntax().covering_element(range) {
+        NodeOrToken::Node(node) => {
+            for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
+                remove_newlines(&mut edit, &token, range)
             }
         }
-    }
-
+        NodeOrToken::Token(token) => remove_newlines(&mut edit, &token, range),
+    };
     edit.finish()
 }
 
+fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextRange) {
+    let intersection = match range.intersect(token.text_range()) {
+        Some(range) => range,
+        None => return,
+    };
+
+    let range = intersection - token.text_range().start();
+    let text = token.text();
+    for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
+        let pos: TextSize = (pos as u32).into();
+        let offset = token.text_range().start() + range.start() + pos;
+        if !edit.invalidates_offset(offset) {
+            remove_newline(edit, &token, offset);
+        }
+    }
+}
+
 fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) {
     if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
         let mut string_open_quote = false;
@@ -148,7 +154,7 @@ fn has_comma_after(node: &SyntaxNode) -> bool {
 }
 
 fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
-    let block_expr = ast::BlockExpr::cast(token.parent())?;
+    let block_expr = ast::BlockExpr::cast(token.parent()?)?;
     if !block_expr.is_standalone() {
         return None;
     }
@@ -170,7 +176,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op
 }
 
 fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
-    let use_tree_list = ast::UseTreeList::cast(token.parent())?;
+    let use_tree_list = ast::UseTreeList::cast(token.parent()?)?;
     let (tree,) = use_tree_list.use_trees().collect_tuple()?;
     edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
     Some(())
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 000c412d9..4241a6dac 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -25,7 +25,7 @@ pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<Text
             Some((node, idx))
         })
         .next()?;
-    let parent = brace_token.parent();
+    let parent = brace_token.parent()?;
     if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
         cov_mark::hit!(pipes_not_braces);
         return None;
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index ec7c7686d..e8a5666bc 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -148,14 +148,15 @@ fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Optio
 
 fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
     let token = syntax.token_at_offset(position.offset).right_biased()?;
+    let token_parent = token.parent()?;
     let kind = token.kind();
     if kind == T![;] {
-        ast::Struct::cast(token.parent())
+        ast::Struct::cast(token_parent)
             .filter(|struct_| struct_.field_list().is_none())
             .and_then(|struct_| struct_.name())
     } else if kind == T!['{'] {
         match_ast! {
-            match (token.parent()) {
+            match token_parent {
                 ast::RecordFieldList(rfl) => match_ast! {
                     match (rfl.syntax().parent()?) {
                         ast::Variant(it) => it.name(),
@@ -169,7 +170,7 @@ fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) ->
             }
         }
     } else if kind == T!['('] {
-        let tfl = ast::TupleFieldList::cast(token.parent())?;
+        let tfl = ast::TupleFieldList::cast(token_parent)?;
         match_ast! {
             match (tfl.syntax().parent()?) {
                 ast::Variant(it) => it.name(),
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 0c7a8fbf8..397e2126b 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -167,8 +167,7 @@ fn find_related_tests(
         let functions = refs.iter().filter_map(|(range, _)| {
             let token = file.token_at_offset(range.start()).next()?;
             let token = sema.descend_into_macros(token);
-            let syntax = token.parent();
-            syntax.ancestors().find_map(ast::Fn::cast)
+            token.ancestors().find_map(ast::Fn::cast)
         });
 
         for fn_def in functions {
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 9bed329d8..870146d24 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -64,7 +64,7 @@ pub(crate) fn highlight(
             Some(range) => {
                 let node = match source_file.syntax().covering_element(range) {
                     NodeOrToken::Node(it) => it,
-                    NodeOrToken::Token(it) => it.parent(),
+                    NodeOrToken::Token(it) => it.parent().unwrap(),
                 };
                 (node, range)
             }
@@ -167,16 +167,19 @@ fn traverse(
         let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT {
             // Inside a macro -- expand it first
             let token = match element.clone().into_token() {
-                Some(it) if it.parent().kind() == TOKEN_TREE => it,
+                Some(it) if it.parent().map_or(false, |it| it.kind() == TOKEN_TREE) => it,
                 _ => continue,
             };
             let token = sema.descend_into_macros(token.clone());
-            let parent = token.parent();
-
-            // We only care Name and Name_ref
-            match (token.kind(), parent.kind()) {
-                (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
-                _ => token.into(),
+            match token.parent() {
+                Some(parent) => {
+                    // We only care Name and Name_ref
+                    match (token.kind(), parent.kind()) {
+                        (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
+                        _ => token.into(),
+                    }
+                }
+                None => token.into(),
             }
         } else {
             element.clone()
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index 8c67a0863..e503abc93 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -28,7 +28,7 @@ pub(super) fn highlight_format_string(
 }
 
 fn is_format_string(string: &ast::String) -> Option<()> {
-    let parent = string.syntax().parent();
+    let parent = string.syntax().parent()?;
 
     let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
     if !matches!(name.text(), "format_args" | "format_args_nl") {
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs
index f979ba434..8979de528 100644
--- a/crates/ide/src/syntax_tree.rs
+++ b/crates/ide/src/syntax_tree.rs
@@ -27,7 +27,7 @@ pub(crate) fn syntax_tree(
                 if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                     return tree;
                 }
-                token.parent()
+                token.parent().unwrap()
             }
         };
 
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index a718faf63..e10b7d98e 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -108,7 +108,7 @@ fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
     };
     let current_indent_len = TextSize::of(current_indent);
 
-    let parent = whitespace.syntax().parent();
+    let parent = whitespace.syntax().parent()?;
     // Make sure dot is a part of call chain
     if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) {
         return None;