From f5a81ec4683613bd62624811733345d627f2127b Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 30 Jan 2021 18:19:21 +0300
Subject: Upgrade rowan

Notably, new rowan comes with support for mutable syntax trees.
---
 crates/ide/src/call_hierarchy.rs             | 11 ++++---
 crates/ide/src/doc_links.rs                  |  2 +-
 crates/ide/src/extend_selection.rs           | 13 +++++---
 crates/ide/src/goto_definition.rs            |  4 +--
 crates/ide/src/goto_type_definition.rs       |  2 +-
 crates/ide/src/hover.rs                      |  4 +--
 crates/ide/src/join_lines.rs                 | 44 ++++++++++++++++------------
 crates/ide/src/matching_brace.rs             |  2 +-
 crates/ide/src/references.rs                 |  7 +++--
 crates/ide/src/runnables.rs                  |  3 +-
 crates/ide/src/syntax_highlighting.rs        | 19 +++++++-----
 crates/ide/src/syntax_highlighting/format.rs |  2 +-
 crates/ide/src/syntax_tree.rs                |  2 +-
 crates/ide/src/typing.rs                     |  2 +-
 14 files changed, 65 insertions(+), 52 deletions(-)

(limited to 'crates/ide')

diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs
index b848945d7..96021f677 100644
--- a/crates/ide/src/call_hierarchy.rs
+++ b/crates/ide/src/call_hierarchy.rs
@@ -53,10 +53,8 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
         for (r_range, _) in references {
             let token = file.token_at_offset(r_range.start()).next()?;
             let token = sema.descend_into_macros(token);
-            let syntax = token.parent();
-
             // This target is the containing function
-            if let Some(nav) = syntax.ancestors().find_map(|node| {
+            if let Some(nav) = token.ancestors().find_map(|node| {
                 let fn_ = ast::Fn::cast(node)?;
                 let def = sema.to_def(&fn_)?;
                 def.try_to_nav(sema.db)
@@ -77,12 +75,13 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
     let file = file.syntax();
     let token = file.token_at_offset(position.offset).next()?;
     let token = sema.descend_into_macros(token);
-    let syntax = token.parent();
 
     let mut calls = CallLocations::default();
 
-    syntax
-        .descendants()
+    token
+        .parent()
+        .into_iter()
+        .flat_map(|it| it.descendants())
         .filter_map(|node| FnCallNode::with_node_exact(&node))
         .filter_map(|call_node| {
             let name_ref = call_node.name_ref()?;
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 7bdd3cca3..461e11060 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -279,7 +279,7 @@ pub(crate) fn external_docs(
     let token = pick_best(file.token_at_offset(position.offset))?;
     let token = sema.descend_into_macros(token);
 
-    let node = token.parent();
+    let node = token.parent()?;
     let definition = match_ast! {
         match node {
             ast::NameRef(name_ref) => NameRefClass::classify(&sema, &name_ref).map(|d| d.referenced(sema.db)),
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index b540d04fe..e187243cb 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -88,7 +88,7 @@ fn try_extend_selection(
                 return Some(range);
             }
         }
-        token.parent()
+        token.parent()?
         }
         NodeOrToken::Node(node) => node,
     };
@@ -142,7 +142,8 @@ fn extend_tokens_from_range(
     let extended = {
         let fst_expanded = sema.descend_into_macros(first_token.clone());
         let lst_expanded = sema.descend_into_macros(last_token.clone());
-        let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
+        let mut lca =
+            algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
         lca = shallowest_node(&lca);
         if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
             lca = lca.parent()?;
@@ -151,9 +152,13 @@ fn extend_tokens_from_range(
     };
 
     // Compute parent node range
-    let validate = |token: &SyntaxToken| {
+    let validate = |token: &SyntaxToken| -> bool {
         let expanded = sema.descend_into_macros(token.clone());
-        algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
+        let parent = match expanded.parent() {
+            Some(it) => it,
+            None => return false,
+        };
+        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
     };
 
     // Find the first and last text range under expanded parent
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index e8f31e4b1..6986477a5 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -30,7 +30,7 @@ pub(crate) fn goto_definition(
     let file = sema.parse(position.file_id).syntax().clone();
     let original_token = pick_best(file.token_at_offset(position.offset))?;
     let token = sema.descend_into_macros(original_token.clone());
-    let parent = token.parent();
+    let parent = token.parent()?;
     if let Some(comment) = ast::Comment::cast(token) {
         let nav = def_for_doc_comment(&sema, position, &comment)?.try_to_nav(db)?;
         return Some(RangeInfo::new(original_token.text_range(), vec![nav]));
@@ -63,7 +63,7 @@ fn def_for_doc_comment(
     position: FilePosition,
     doc_comment: &ast::Comment,
 ) -> Option {
-    let parent = doc_comment.syntax().parent();
+    let parent = doc_comment.syntax().parent()?;
     let (link, ns) = extract_positioned_link_from_comment(position, doc_comment)?;
     let def = doc_owner_to_def(sema, parent)?;
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index 369a59820..2d38cb112 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -22,7 +22,7 @@ pub(crate) fn goto_type_definition(
     let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
     let token: SyntaxToken = sema.descend_into_macros(token);
 
-    let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
+    let (ty, node) = sema.token_ancestors_with_macros(token).find_map(|node| {
         let ty = match_ast! {
             match node {
                 ast::Expr(it) => sema.type_of_expr(&it)?,
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index ea45086ce..6215df6bd 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -92,7 +92,7 @@ pub(crate) fn hover(
 
     let mut res = HoverResult::default();
 
-    let node = token.parent();
+    let node = token.parent()?;
     let definition = match_ast! {
         match node {
             // we don't use NameClass::referenced_or_defined here as we do not want to resolve
@@ -438,7 +438,7 @@ fn hover_for_keyword(
     if !token.kind().is_keyword() {
         return None;
     }
-    let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()).krate());
+    let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate());
     // std exposes {}_keyword modules with docstrings on the root to document keywords
     let keyword_mod = format!("{}_keyword", token.text());
     let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs
index d571ed559..4b25135cd 100644
--- a/crates/ide/src/join_lines.rs
+++ b/crates/ide/src/join_lines.rs
@@ -32,29 +32,35 @@ pub(crate) fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
         range
     };
 
-    let node = match file.syntax().covering_element(range) {
-        NodeOrToken::Node(node) => node,
-        NodeOrToken::Token(token) => token.parent(),
-    };
     let mut edit = TextEdit::builder();
-    for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
-        let range = match range.intersect(token.text_range()) {
-            Some(range) => range,
-            None => continue,
-        } - token.text_range().start();
-        let text = token.text();
-        for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
-            let pos: TextSize = (pos as u32).into();
-            let offset = token.text_range().start() + range.start() + pos;
-            if !edit.invalidates_offset(offset) {
-                remove_newline(&mut edit, &token, offset);
+    match file.syntax().covering_element(range) {
+        NodeOrToken::Node(node) => {
+            for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
+                remove_newlines(&mut edit, &token, range)
             }
         }
-    }
-
+        NodeOrToken::Token(token) => remove_newlines(&mut edit, &token, range),
+    };
     edit.finish()
 }
 
+fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextRange) {
+    let intersection = match range.intersect(token.text_range()) {
+        Some(range) => range,
+        None => return,
+    };
+
+    let range = intersection - token.text_range().start();
+    let text = token.text();
+    for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
+        let pos: TextSize = (pos as u32).into();
+        let offset = token.text_range().start() + range.start() + pos;
+        if !edit.invalidates_offset(offset) {
+            remove_newline(edit, &token, offset);
+        }
+    }
+}
+
 fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextSize) {
     if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
         let mut string_open_quote = false;
@@ -148,7 +154,7 @@ fn has_comma_after(node: &SyntaxNode) -> bool {
 }
 
 fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
-    let block_expr = ast::BlockExpr::cast(token.parent())?;
+    let block_expr = ast::BlockExpr::cast(token.parent()?)?;
     if !block_expr.is_standalone() {
         return None;
     }
@@ -170,7 +176,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op
 }
 
 fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
-    let use_tree_list = ast::UseTreeList::cast(token.parent())?;
+    let use_tree_list = ast::UseTreeList::cast(token.parent()?)?;
     let (tree,) = use_tree_list.use_trees().collect_tuple()?;
     edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
     Some(())
 }
diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs
index 000c412d9..4241a6dac 100644
--- a/crates/ide/src/matching_brace.rs
+++ b/crates/ide/src/matching_brace.rs
@@ -25,7 +25,7 @@ pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option

diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
 fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) -> Option {
     let token = syntax.token_at_offset(position.offset).right_biased()?;
+    let token_parent = token.parent()?;
     let kind = token.kind();
     if kind == T![;] {
-        ast::Struct::cast(token.parent())
+        ast::Struct::cast(token_parent)
             .filter(|struct_| struct_.field_list().is_none())
             .and_then(|struct_| struct_.name())
     } else if kind == T!['{'] {
         match_ast! {
-            match (token.parent()) {
+            match token_parent {
                 ast::RecordFieldList(rfl) => match_ast! {
                     match (rfl.syntax().parent()?) {
                         ast::Variant(it) => it.name(),
@@ -169,7 +170,7 @@ fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) ->
             }
         }
     } else if kind == T!['('] {
-        let tfl = ast::TupleFieldList::cast(token.parent())?;
+        let tfl = ast::TupleFieldList::cast(token_parent)?;
         match_ast! {
             match (tfl.syntax().parent()?) {
                 ast::Variant(it) => it.name(),
diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs
index 0c7a8fbf8..397e2126b 100644
--- a/crates/ide/src/runnables.rs
+++ b/crates/ide/src/runnables.rs
@@ -167,8 +167,7 @@ fn find_related_tests(
     let functions = refs.iter().filter_map(|(range, _)| {
         let token = file.token_at_offset(range.start()).next()?;
         let token = sema.descend_into_macros(token);
-        let syntax = token.parent();
-        syntax.ancestors().find_map(ast::Fn::cast)
+        token.ancestors().find_map(ast::Fn::cast)
     });
 
     for fn_def in functions {
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 9bed329d8..870146d24 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -64,7 +64,7 @@ pub(crate) fn highlight(
         Some(range) => {
            let node = match source_file.syntax().covering_element(range) {
                 NodeOrToken::Node(it) => it,
-                NodeOrToken::Token(it) => it.parent(),
+                NodeOrToken::Token(it) => it.parent().unwrap(),
             };
             (node, range)
         }
@@ -167,16 +167,19 @@ fn traverse(
         let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT {
             // Inside a macro -- expand it first
             let token = match element.clone().into_token() {
-                Some(it) if it.parent().kind() == TOKEN_TREE => it,
+                Some(it) if it.parent().map_or(false, |it| it.kind() == TOKEN_TREE) => it,
                 _ => continue,
             };
             let token = sema.descend_into_macros(token.clone());
-            let parent = token.parent();
-
-            // We only care Name and Name_ref
-            match (token.kind(), parent.kind()) {
-                (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
-                _ => token.into(),
+            match token.parent() {
+                Some(parent) => {
+                    // We only care Name and Name_ref
+                    match (token.kind(), parent.kind()) {
+                        (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
+                        _ => token.into(),
+                    }
+                }
+                None => token.into(),
             }
         } else {
             element.clone()
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index 8c67a0863..e503abc93 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -28,7 +28,7 @@ pub(super) fn highlight_format_string(
 }
 
 fn is_format_string(string: &ast::String) -> Option<()> {
-    let parent = string.syntax().parent();
+    let parent = string.syntax().parent()?;
     let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
     if !matches!(name.text(), "format_args" | "format_args_nl") {
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs
index f979ba434..8979de528 100644
--- a/crates/ide/src/syntax_tree.rs
+++ b/crates/ide/src/syntax_tree.rs
@@ -27,7 +27,7 @@ pub(crate) fn syntax_tree(
             if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                 return tree;
             }
-            token.parent()
+            token.parent().unwrap()
         }
     };
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index a718faf63..e10b7d98e 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -108,7 +108,7 @@ fn on_dot_typed(file: &SourceFile, offset: TextSize) -> Option {
     };
     let current_indent_len = TextSize::of(current_indent);
 
-    let parent = whitespace.syntax().parent();
+    let parent = whitespace.syntax().parent()?;
     // Make sure dot is a part of call chain
     if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) {
         return None;
-- 
cgit v1.2.3
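
The edit repeated throughout this patch is mechanical: with the upgraded rowan, SyntaxToken::parent() returns Option<SyntaxNode> instead of a bare SyntaxNode, so every call site has to say what a detached token means. The diff above uses three patterns: ? where the surrounding function already returns Option (goto_definition, hover, typing, and friends), .into_iter().flat_map(..) where the parent feeds an iterator chain (outgoing_calls), and .map_or(false, ..) in boolean guards (the TOKEN_TREE check in syntax_highlighting); a couple of sites where a missing parent would violate an internal invariant keep .unwrap(). The sketch below is schematic only: Node and Token are stand-ins, not rowan's real types, reduced to the one detail that matters here.

// Schematic sketch, not rust-analyzer or rowan code: Node and Token stand in
// for rowan's SyntaxNode and SyntaxToken, keeping only the detail that
// parent() now returns Option because a token can be detached from any tree.

#[derive(Clone)]
struct Node {
    children: Vec<String>,
}

struct Token {
    parent: Option<Node>,
}

impl Node {
    fn descendants(&self) -> impl Iterator<Item = String> + '_ {
        self.children.iter().cloned()
    }
}

impl Token {
    // New-style accessor: None for a detached token, where the old API
    // handed back a parent unconditionally.
    fn parent(&self) -> Option<Node> {
        self.parent.clone()
    }
}

// Pattern 1: inside a function that already returns Option, `?` bails out early.
fn first_descendant(token: &Token) -> Option<String> {
    let node = token.parent()?; // was: let node = token.parent();
    node.descendants().next()
}

// Pattern 2: in an iterator chain, into_iter().flat_map(..) turns a missing
// parent into an empty sequence, as in outgoing_calls above.
fn all_descendants(token: &Token) -> Vec<String> {
    token
        .parent()
        .into_iter()
        .flat_map(|node| node.descendants().collect::<Vec<_>>())
        .collect()
}

// Pattern 3: in a boolean guard, map_or(false, ..) replaces the direct call,
// as in the TOKEN_TREE check in syntax_highlighting above.
fn has_descendants(token: &Token) -> bool {
    token.parent().map_or(false, |node| node.descendants().next().is_some())
}

fn main() {
    let attached = Token { parent: Some(Node { children: vec!["child".to_owned()] }) };
    assert_eq!(first_descendant(&attached), Some("child".to_owned()));
    assert_eq!(all_descendants(&attached), vec!["child".to_owned()]);
    assert!(has_descendants(&attached));

    let detached = Token { parent: None };
    assert_eq!(first_descendant(&detached), None);
    assert!(all_descendants(&detached).is_empty());
    assert!(!has_descendants(&detached));
}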