From c9cfd57eeaa53657c0af7b9c4ba74d6b7b9889ed Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sat, 20 Jul 2019 20:04:34 +0300 Subject: switch to upstream rowan's API --- Cargo.lock | 22 +- crates/ra_assists/Cargo.toml | 1 + crates/ra_assists/src/add_impl.rs | 5 +- crates/ra_assists/src/ast_editor.rs | 9 +- crates/ra_assists/src/introduce_variable.rs | 5 +- crates/ra_assists/src/move_guard.rs | 8 +- crates/ra_ide_api/Cargo.toml | 1 + crates/ra_ide_api/src/display/short_label.rs | 5 +- crates/ra_ide_api/src/extend_selection.rs | 10 +- crates/ra_ide_api/src/folding_ranges.rs | 18 +- crates/ra_ide_api/src/join_lines.rs | 6 +- crates/ra_ide_api/src/syntax_tree.rs | 6 +- crates/ra_mbe/src/syntax_bridge.rs | 13 +- crates/ra_mbe/src/tests.rs | 6 +- crates/ra_syntax/Cargo.toml | 2 +- crates/ra_syntax/src/algo.rs | 117 ++++++- crates/ra_syntax/src/ast/expr_extensions.rs | 13 +- crates/ra_syntax/src/ast/extensions.rs | 5 +- crates/ra_syntax/src/lib.rs | 16 +- crates/ra_syntax/src/parsing/reparsing.rs | 13 +- crates/ra_syntax/src/syntax_node.rs | 483 ++------------------------- crates/ra_syntax/src/syntax_text.rs | 178 ---------- 22 files changed, 206 insertions(+), 736 deletions(-) delete mode 100644 crates/ra_syntax/src/syntax_text.rs diff --git a/Cargo.lock b/Cargo.lock index 141cc6088..8feaf27ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -263,11 +263,6 @@ dependencies = [ "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "colosseum" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "console" version = "0.7.7" @@ -484,6 +479,11 @@ dependencies = [ "yansi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "format-buf" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "fs_extra" version = "1.1.0" @@ -1124,6 +1124,7 @@ name = "ra_assists" version = "0.1.0" dependencies = [ "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", + "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1211,6 +1212,7 @@ dependencies = [ name = "ra_ide_api" version = "0.1.0" dependencies = [ + "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1313,7 +1315,7 @@ dependencies = [ "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "ra_parser 0.1.0", "ra_text_edit 0.1.0", - "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", + "rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)", "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "test_utils 0.1.0", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1584,11 +1586,9 @@ dependencies = [ [[package]] name = "rowan" -version = "0.5.6" +version = "0.6.0-pre.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot 0.9.0 
(registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2150,7 +2150,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" "checksum clicolors-control 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "73abfd4c73d003a674ce5d2933fca6ce6c42480ea84a5ffe0a2dc39ed56300f9" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -"checksum colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "370c83b49aedf022ee27942e8ae1d9de1cf40dc9653ee6550e4455d08f6406f9" "checksum console 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8ca57c2c14b8a2bf3105bc9d15574aad80babf6a9c44b1058034cdf8bd169628" "checksum cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "33f07976bb6821459632d7a18d97ccca005cb5c552f251f822c7c1781c1d7035" "checksum crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b" @@ -2177,6 +2176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum filetime 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2f8c63033fcba1f51ef744505b3cad42510432b904c062afa67ad7ece008429d" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" "checksum flexi_logger 0.13.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d3c4470d1ff8446baa0c13202646722886dde8dc4c5d33cb8242d70ece79d5" +"checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53" "checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" "checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" "checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" @@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f" -"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be" +"checksum rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0eeee40f1a2724b7d0d9fa5f73a7804cd2f4c91b37ba9f785d429f31819d60df" "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml index 5ddac1e48..2113286a3 100644 --- a/crates/ra_assists/Cargo.toml +++ b/crates/ra_assists/Cargo.toml @@ -5,6 +5,7 @@ version = "0.1.0" authors = ["rust-analyzer developers"] [dependencies] +format-buf = "1.0.0" once_cell = "0.2.0" join_to_string = "0.1.3" itertools = "0.8.0" diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs index 59ca88468..4b61f4031 100644 --- a/crates/ra_assists/src/add_impl.rs +++ b/crates/ra_assists/src/add_impl.rs @@ -1,5 +1,4 @@ -use std::fmt::Write; - +use format_buf::format; use hir::db::HirDatabase; use join_to_string::join; use ra_syntax::{ @@ -19,7 +18,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx) -> Option { let mut buf = String::new(); buf.push_str("\n\nimpl"); if let Some(type_params) = &type_params { - write!(buf, "{}", type_params.syntax()).unwrap(); + format!(buf, "{}", type_params.syntax()); } buf.push_str(" "); buf.push_str(name.text().as_str()); diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs index ab6c347ad..95b871b30 100644 --- a/crates/ra_assists/src/ast_editor.rs +++ b/crates/ra_assists/src/ast_editor.rs @@ -4,7 +4,10 @@ use arrayvec::ArrayVec; use hir::Name; use ra_fmt::leading_indent; use ra_syntax::{ - ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T, + algo::{insert_children, replace_children}, + ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, + SyntaxKind::*, + T, }; use ra_text_edit::TextEditBuilder; @@ -38,7 +41,7 @@ impl AstEditor { position: InsertPosition, to_insert: impl Iterator, ) -> N { - let new_syntax = self.ast().syntax().insert_children(position, to_insert); + let new_syntax = insert_children(self.ast().syntax(), position, to_insert); N::cast(new_syntax).unwrap() } @@ -48,7 +51,7 @@ impl AstEditor { to_delete: RangeInclusive, to_insert: impl Iterator, ) -> N { - let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert); + let new_syntax = replace_children(self.ast().syntax(), to_delete, to_insert); N::cast(new_syntax).unwrap() } diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs index 911de2d48..5eb708310 100644 --- a/crates/ra_assists/src/introduce_variable.rs +++ b/crates/ra_assists/src/introduce_variable.rs @@ -1,5 +1,4 @@ -use std::fmt::Write; - +use format_buf::format; use hir::db::HirDatabase; use ra_syntax::{ ast::{self, AstNode}, @@ -37,7 +36,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx) -> Option buf.push_str("let var_name = "); TextUnit::of_str("let ") }; - write!(buf, "{}", expr.syntax()).unwrap(); + format!(buf, "{}", expr.syntax()); let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone()); let is_full_stmt = if let Some(expr_stmt) = &full_stmt { Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone()) diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs index 0f3cdbe53..127c9e068 100644 --- a/crates/ra_assists/src/move_guard.rs +++ b/crates/ra_assists/src/move_guard.rs @@ -2,7 +2,7 @@ use hir::db::HirDatabase; use 
ra_syntax::{ ast, ast::{AstNode, AstToken, IfExpr, MatchArm}, - SyntaxElement, TextUnit, + TextUnit, }; use crate::{Assist, AssistCtx, AssistId}; @@ -18,10 +18,10 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx) -> Op ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { edit.target(guard.syntax().text_range()); - let offseting_amount = match &space_before_guard { - Some(SyntaxElement::Token(tok)) => { + let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) { + Some(tok) => { if let Some(_) = ast::Whitespace::cast(tok.clone()) { - let ele = space_before_guard.unwrap().text_range(); + let ele = tok.text_range(); edit.delete(ele); ele.len() } else { diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml index c49a05de1..78a3db14d 100644 --- a/crates/ra_ide_api/Cargo.toml +++ b/crates/ra_ide_api/Cargo.toml @@ -5,6 +5,7 @@ version = "0.1.0" authors = ["rust-analyzer developers"] [dependencies] +format-buf = "1.0.0" itertools = "0.8.0" join_to_string = "0.1.3" log = "0.4.5" diff --git a/crates/ra_ide_api/src/display/short_label.rs b/crates/ra_ide_api/src/display/short_label.rs index be499e485..825a033ee 100644 --- a/crates/ra_ide_api/src/display/short_label.rs +++ b/crates/ra_ide_api/src/display/short_label.rs @@ -1,5 +1,4 @@ -use std::fmt::Write; - +use format_buf::format; use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner}; pub(crate) trait ShortLabel { @@ -73,7 +72,7 @@ where let mut buf = short_label_from_node(node, prefix)?; if let Some(type_ref) = node.ascribed_type() { - write!(buf, ": {}", type_ref.syntax()).unwrap(); + format!(buf, ": {}", type_ref.syntax()); } Some(buf) diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 140820df6..f78c562af 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs @@ -2,7 +2,7 @@ use ra_db::SourceDatabase; use ra_syntax::{ algo::{find_covering_element, find_token_at_offset, TokenAtOffset}, ast::{self, AstNode, AstToken}, - Direction, SyntaxElement, + Direction, NodeOrToken, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextUnit, T, }; @@ -53,7 +53,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option { + NodeOrToken::Token(token) => { if token.text_range() != range { return Some(token.text_range()); } @@ -64,7 +64,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option node, + NodeOrToken::Node(node) => node, }; if node.text_range() != range { return Some(node.text_range()); @@ -153,8 +153,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option { node.siblings_with_tokens(dir) .skip(1) .skip_while(|node| match node { - SyntaxElement::Node(_) => false, - SyntaxElement::Token(it) => is_single_line_ws(it), + NodeOrToken::Node(_) => false, + NodeOrToken::Token(it) => is_single_line_ws(it), }) .next() .and_then(|it| it.into_token()) diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs index 571d1c595..e60ae8cf6 100644 --- a/crates/ra_ide_api/src/folding_ranges.rs +++ b/crates/ra_ide_api/src/folding_ranges.rs @@ -2,7 +2,7 @@ use rustc_hash::FxHashSet; use ra_syntax::{ ast::{self, AstNode, AstToken, VisibilityOwner}, - Direction, SourceFile, SyntaxElement, + Direction, NodeOrToken, SourceFile, SyntaxKind::{self, *}, SyntaxNode, TextRange, }; @@ -31,8 +31,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { // Fold items that span multiple lines if 
let Some(kind) = fold_kind(element.kind()) { let is_multiline = match &element { - SyntaxElement::Node(node) => node.text().contains_char('\n'), - SyntaxElement::Token(token) => token.text().contains('\n'), + NodeOrToken::Node(node) => node.text().contains_char('\n'), + NodeOrToken::Token(token) => token.text().contains('\n'), }; if is_multiline { res.push(Fold { range: element.text_range(), kind }); @@ -41,7 +41,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { } match element { - SyntaxElement::Token(token) => { + NodeOrToken::Token(token) => { // Fold groups of comments if let Some(comment) = ast::Comment::cast(token) { if !visited_comments.contains(&comment) { @@ -53,7 +53,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { } } } - SyntaxElement::Node(node) => { + NodeOrToken::Node(node) => { // Fold groups of imports if node.kind() == USE_ITEM && !visited_imports.contains(&node) { if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { @@ -108,7 +108,7 @@ fn contiguous_range_for_group_unless( let mut last = first.clone(); for element in first.siblings_with_tokens(Direction::Next) { let node = match element { - SyntaxElement::Token(token) => { + NodeOrToken::Token(token) => { if let Some(ws) = ast::Whitespace::cast(token) { if !ws.spans_multiple_lines() { // Ignore whitespace without blank lines @@ -119,7 +119,7 @@ fn contiguous_range_for_group_unless( // group ends here break; } - SyntaxElement::Node(node) => node, + NodeOrToken::Node(node) => node, }; // Stop if we find a node that doesn't belong to the group @@ -154,7 +154,7 @@ fn contiguous_range_for_comment( let mut last = first.clone(); for element in first.syntax().siblings_with_tokens(Direction::Next) { match element { - SyntaxElement::Token(token) => { + NodeOrToken::Token(token) => { if let Some(ws) = ast::Whitespace::cast(token.clone()) { if !ws.spans_multiple_lines() { // Ignore whitespace without blank lines @@ -173,7 +173,7 @@ fn contiguous_range_for_comment( // * A comment of a different flavor was reached break; } - SyntaxElement::Node(_) => break, + NodeOrToken::Node(_) => break, }; } diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs index 7f25f2108..a2e4b6f3c 100644 --- a/crates/ra_ide_api/src/join_lines.rs +++ b/crates/ra_ide_api/src/join_lines.rs @@ -3,7 +3,7 @@ use ra_fmt::{compute_ws, extract_trivial_expression}; use ra_syntax::{ algo::{find_covering_element, non_trivia_sibling}, ast::{self, AstNode, AstToken}, - Direction, SourceFile, SyntaxElement, + Direction, NodeOrToken, SourceFile, SyntaxKind::{self, WHITESPACE}, SyntaxNode, SyntaxToken, TextRange, TextUnit, T, }; @@ -23,8 +23,8 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { }; let node = match find_covering_element(file.syntax(), range) { - SyntaxElement::Node(node) => node, - SyntaxElement::Token(token) => token.parent(), + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => token.parent(), }; let mut edit = TextEditBuilder::default(); for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) { diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs index 76c50f6d6..a07e670fa 100644 --- a/crates/ra_ide_api/src/syntax_tree.rs +++ b/crates/ra_ide_api/src/syntax_tree.rs @@ -1,7 +1,7 @@ use crate::db::RootDatabase; use ra_db::SourceDatabase; use ra_syntax::{ - algo, AstNode, SourceFile, SyntaxElement, + algo, AstNode, NodeOrToken, SourceFile, SyntaxKind::{RAW_STRING, STRING}, SyntaxToken, 
TextRange, }; @@ -16,8 +16,8 @@ pub(crate) fn syntax_tree( let parse = db.parse(file_id); if let Some(text_range) = text_range { let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { - SyntaxElement::Node(node) => node, - SyntaxElement::Token(token) => { + NodeOrToken::Node(node) => node, + NodeOrToken::Token(token) => { if let Some(tree) = syntax_tree_for_string(&token, text_range) { return tree; } diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 7ff0fc472..8225759e7 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -1,12 +1,13 @@ -use crate::subtree_source::SubtreeTokenSource; -use crate::ExpandError; use ra_parser::{ParseError, TreeSink}; use ra_syntax::{ - ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, + ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxTreeBuilder, TextRange, TextUnit, T, }; use tt::buffer::{Cursor, TokenBuffer}; +use crate::subtree_source::SubtreeTokenSource; +use crate::ExpandError; + /// Maps `tt::TokenId` to the relative range of the original token. #[derive(Default)] pub struct TokenMap { @@ -200,7 +201,7 @@ fn convert_tt( } match child { - SyntaxElement::Token(token) => { + NodeOrToken::Token(token) => { if let Some(doc_tokens) = convert_doc_comment(&token) { token_trees.extend(doc_tokens); } else if token.kind().is_trivia() { @@ -210,7 +211,7 @@ fn convert_tt( let char = token.text().chars().next().unwrap(); let spacing = match child_iter.peek() { - Some(SyntaxElement::Token(token)) => { + Some(NodeOrToken::Token(token)) => { if token.kind().is_punct() { tt::Spacing::Joint } else { @@ -241,7 +242,7 @@ fn convert_tt( token_trees.push(child); } } - SyntaxElement::Node(node) => { + NodeOrToken::Node(node) => { let child = convert_tt(token_map, global_offset, &node)?.into(); token_trees.push(child); } diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs index 9151b6ecd..192e9007d 100644 --- a/crates/ra_mbe/src/tests.rs +++ b/crates/ra_mbe/src/tests.rs @@ -1,4 +1,4 @@ -use ra_syntax::{ast, AstNode}; +use ra_syntax::{ast, AstNode, NodeOrToken}; use super::*; @@ -118,11 +118,11 @@ pub fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String { match event { WalkEvent::Enter(element) => { match element { - ra_syntax::SyntaxElement::Node(node) => { + NodeOrToken::Node(node) => { indent!(); writeln!(buf, "{:?}", node.kind()).unwrap(); } - ra_syntax::SyntaxElement::Token(token) => match token.kind() { + NodeOrToken::Token(token) => match token.kind() { ra_syntax::SyntaxKind::WHITESPACE => {} _ => { indent!(); diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index a5565de33..97b6b047f 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml @@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer" [dependencies] unicode-xid = "0.1.0" itertools = "0.8.0" -rowan = "0.5.6" +rowan = "0.6.0-pre.1" # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here # to reduce number of compilations diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index f47e11e66..6bb46b021 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs @@ -1,17 +1,18 @@ pub mod visit; +use std::ops::RangeInclusive; + use itertools::Itertools; -use crate::{AstNode, Direction, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit}; +use crate::{ + 
AstNode, Direction, InsertPosition, NodeOrToken, SourceFile, SyntaxElement, SyntaxNode, + SyntaxNodePtr, SyntaxToken, TextRange, TextUnit, +}; pub use rowan::TokenAtOffset; pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset { - match node.0.token_at_offset(offset) { - TokenAtOffset::None => TokenAtOffset::None, - TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)), - TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)), - } + node.token_at_offset(offset) } /// Returns ancestors of the node at the offset, sorted by length. This should @@ -44,20 +45,110 @@ pub fn find_node_at_offset(syntax: &SyntaxNode, offset: TextUnit) -> /// Finds the first sibling in the given direction which is not `trivia` pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option { return match element { - SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), - SyntaxElement::Token(token) => { - token.siblings_with_tokens(direction).skip(1).find(not_trivia) - } + NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), + NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia), }; fn not_trivia(element: &SyntaxElement) -> bool { match element { - SyntaxElement::Node(_) => true, - SyntaxElement::Token(token) => !token.kind().is_trivia(), + NodeOrToken::Node(_) => true, + NodeOrToken::Token(token) => !token.kind().is_trivia(), } } } pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { - SyntaxElement::new(root.0.covering_node(range)) + root.covering_element(range) +} + +/// Adds specified children (tokens or nodes) to the current node at the +/// specific position. +/// +/// This is a type-unsafe low-level editing API, if you need to use it, +/// prefer to create a type-safe abstraction on top of it instead. +pub fn insert_children( + parent: &SyntaxNode, + position: InsertPosition, + to_insert: impl Iterator, +) -> SyntaxNode { + let mut delta = TextUnit::default(); + let to_insert = to_insert.map(|element| { + delta += element.text_range().len(); + to_green_element(element) + }); + + let old_children = parent.green().children(); + + let new_children = match &position { + InsertPosition::First => { + to_insert.chain(old_children.iter().cloned()).collect::>() + } + InsertPosition::Last => old_children.iter().cloned().chain(to_insert).collect::>(), + InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { + let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; + let split_at = position_of_child(parent, anchor.clone()) + take_anchor; + let (before, after) = old_children.split_at(split_at); + before + .iter() + .cloned() + .chain(to_insert) + .chain(after.iter().cloned()) + .collect::>() + } + }; + + with_children(parent, new_children) +} + +/// Replaces all nodes in `to_delete` with nodes from `to_insert` +/// +/// This is a type-unsafe low-level editing API, if you need to use it, +/// prefer to create a type-safe abstraction on top of it instead. 
+pub fn replace_children( + parent: &SyntaxNode, + to_delete: RangeInclusive, + to_insert: impl Iterator, +) -> SyntaxNode { + let start = position_of_child(parent, to_delete.start().clone()); + let end = position_of_child(parent, to_delete.end().clone()); + let old_children = parent.green().children(); + + let new_children = old_children[..start] + .iter() + .cloned() + .chain(to_insert.map(to_green_element)) + .chain(old_children[end + 1..].iter().cloned()) + .collect::>(); + with_children(parent, new_children) +} + +fn with_children( + parent: &SyntaxNode, + new_children: Box<[NodeOrToken]>, +) -> SyntaxNode { + let len = new_children.iter().map(|it| it.text_len()).sum::(); + let new_node = + rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children); + let new_file_node = parent.replace_with(new_node); + let file = SourceFile::new(new_file_node); + + // FIXME: use a more elegant way to re-fetch the node (#1185), make + // `range` private afterwards + let mut ptr = SyntaxNodePtr::new(parent); + ptr.range = TextRange::offset_len(ptr.range().start(), len); + ptr.to_node(file.syntax()).to_owned() +} + +fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize { + parent + .children_with_tokens() + .position(|it| it == child) + .expect("element is not a child of current element") +} + +fn to_green_element(element: SyntaxElement) -> NodeOrToken { + match element { + NodeOrToken::Node(it) => it.green().clone().into(), + NodeOrToken::Token(it) => it.green().clone().into(), + } } diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 139bd3ec0..f9190d877 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs @@ -2,7 +2,7 @@ use crate::{ ast::{self, child_opt, children, AstChildren, AstNode}, - SmolStr, SyntaxElement, + SmolStr, SyntaxKind::*, SyntaxToken, T, }; @@ -229,14 +229,11 @@ pub enum LiteralKind { impl ast::Literal { pub fn token(&self) -> SyntaxToken { - let elem = self - .syntax() + self.syntax() .children_with_tokens() - .find(|e| e.kind() != ATTR && !e.kind().is_trivia()); - match elem { - Some(SyntaxElement::Token(token)) => token, - _ => unreachable!(), - } + .find(|e| e.kind() != ATTR && !e.kind().is_trivia()) + .and_then(|e| e.into_token()) + .unwrap() } pub fn kind(&self) -> LiteralKind { diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs index 753fc42c6..d4873b39a 100644 --- a/crates/ra_syntax/src/ast/extensions.rs +++ b/crates/ra_syntax/src/ast/extensions.rs @@ -24,10 +24,7 @@ impl ast::NameRef { } fn text_of_first_token(node: &SyntaxNode) -> &SmolStr { - match node.0.green().children().first() { - Some(rowan::GreenElement::Token(it)) => it.text(), - _ => panic!(), - } + node.green().children().first().and_then(|it| it.as_token()).unwrap().text() } impl ast::Attr { diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 8af04c136..21c07d69a 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -20,7 +20,6 @@ //! 
[Swift]: mod syntax_node; -mod syntax_text; mod syntax_error; mod parsing; mod validation; @@ -43,14 +42,13 @@ pub use crate::{ ptr::{AstPtr, SyntaxNodePtr}, syntax_error::{Location, SyntaxError, SyntaxErrorKind}, syntax_node::{ - Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, - WalkEvent, + Direction, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, + SyntaxTreeBuilder, WalkEvent, }, - syntax_text::SyntaxText, }; pub use ra_parser::SyntaxKind; pub use ra_parser::T; -pub use rowan::{SmolStr, TextRange, TextUnit}; +pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit}; /// `Parse` is the result of the parsing: a syntax tree and a collection of /// errors. @@ -76,7 +74,7 @@ impl Parse { } pub fn syntax_node(&self) -> SyntaxNode { - SyntaxNode::new(self.green.clone()) + SyntaxNode::new_root(self.green.clone()) } } @@ -147,7 +145,7 @@ pub use crate::ast::SourceFile; impl SourceFile { fn new(green: GreenNode) -> SourceFile { - let root = SyntaxNode::new(green); + let root = SyntaxNode::new_root(green); if cfg!(debug_assertions) { validation::validate_block_structure(&root); } @@ -267,8 +265,8 @@ fn api_walkthrough() { match event { WalkEvent::Enter(node) => { let text = match &node { - SyntaxElement::Node(it) => it.text().to_string(), - SyntaxElement::Token(it) => it.text().to_string(), + NodeOrToken::Node(it) => it.text().to_string(), + NodeOrToken::Token(it) => it.text().to_string(), }; buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); indent += 2; diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 2f388bdfe..65b8aa10d 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs @@ -16,7 +16,7 @@ use crate::{ text_token_source::TextTokenSource, text_tree_sink::TextTreeSink, }, - syntax_node::{GreenNode, GreenToken, SyntaxElement, SyntaxNode}, + syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode}, SyntaxError, SyntaxKind::*, TextRange, TextUnit, T, @@ -70,7 +70,8 @@ fn reparse_token<'node>( } } - let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into()); + let new_token = + GreenToken::new(rowan::cursor::SyntaxKind(token.kind().into()), text.into()); Some((token.replace_with(new_token), token.text_range())) } _ => None, @@ -98,8 +99,8 @@ fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { let edit = AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); let text = match element { - SyntaxElement::Token(token) => token.text().to_string(), - SyntaxElement::Node(node) => node.text().to_string(), + NodeOrToken::Token(token) => token.text().to_string(), + NodeOrToken::Node(node) => node.text().to_string(), }; edit.apply(text) } @@ -114,8 +115,8 @@ fn is_contextual_kw(text: &str) -> bool { fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { let node = algo::find_covering_element(node, range); let mut ancestors = match node { - SyntaxElement::Token(it) => it.parent().ancestors(), - SyntaxElement::Node(it) => it.ancestors(), + NodeOrToken::Token(it) => it.parent().ancestors(), + NodeOrToken::Node(it) => it.ancestors(), }; ancestors.find_map(|node| { let first_child = node.first_child_or_token().map(|it| it.kind()); diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index c42045d77..689dbefde 100644 --- 
a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs @@ -6,15 +6,12 @@ //! The *real* implementation is in the (language-agnostic) `rowan` crate, this //! modules just wraps its API. -use std::{fmt, iter::successors, ops::RangeInclusive}; - use ra_parser::ParseError; -use rowan::GreenNodeBuilder; +use rowan::{GreenNodeBuilder, Language}; use crate::{ syntax_error::{SyntaxError, SyntaxErrorKind}, - AstNode, Parse, SmolStr, SourceFile, SyntaxKind, SyntaxNodePtr, SyntaxText, TextRange, - TextUnit, + Parse, SmolStr, SyntaxKind, TextUnit, }; pub use rowan::WalkEvent; @@ -28,465 +25,27 @@ pub enum InsertPosition { After(T), } -#[derive(PartialEq, Eq, Hash, Clone)] -pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode); - -impl fmt::Debug for SyntaxNode { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if f.alternate() { - let mut level = 0; - for event in self.preorder_with_tokens() { - match event { - WalkEvent::Enter(element) => { - for _ in 0..level { - write!(f, " ")?; - } - match element { - SyntaxElement::Node(node) => writeln!(f, "{:?}", node)?, - SyntaxElement::Token(token) => writeln!(f, "{:?}", token)?, - } - level += 1; - } - WalkEvent::Leave(_) => level -= 1, - } - } - assert_eq!(level, 0); - Ok(()) - } else { - write!(f, "{:?}@{:?}", self.kind(), self.text_range()) - } - } -} - -impl fmt::Display for SyntaxNode { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.text(), fmt) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Direction { - Next, - Prev, -} - -impl SyntaxNode { - pub(crate) fn new(green: GreenNode) -> SyntaxNode { - let inner = rowan::cursor::SyntaxNode::new_root(green); - SyntaxNode(inner) - } - - pub fn kind(&self) -> SyntaxKind { - self.0.kind().0.into() - } - - pub fn text_range(&self) -> TextRange { - self.0.text_range() - } - - pub fn text(&self) -> SyntaxText { - SyntaxText::new(self.clone()) - } - - pub fn parent(&self) -> Option { - self.0.parent().map(SyntaxNode) - } - - pub fn first_child(&self) -> Option { - self.0.first_child().map(SyntaxNode) - } - - pub fn first_child_or_token(&self) -> Option { - self.0.first_child_or_token().map(SyntaxElement::new) - } - - pub fn last_child(&self) -> Option { - self.0.last_child().map(SyntaxNode) - } - - pub fn last_child_or_token(&self) -> Option { - self.0.last_child_or_token().map(SyntaxElement::new) - } - - pub fn next_sibling(&self) -> Option { - self.0.next_sibling().map(SyntaxNode) - } - - pub fn next_sibling_or_token(&self) -> Option { - self.0.next_sibling_or_token().map(SyntaxElement::new) - } - - pub fn prev_sibling(&self) -> Option { - self.0.prev_sibling().map(SyntaxNode) - } - - pub fn prev_sibling_or_token(&self) -> Option { - self.0.prev_sibling_or_token().map(SyntaxElement::new) - } - - pub fn children(&self) -> SyntaxNodeChildren { - SyntaxNodeChildren(self.0.children()) - } - - pub fn children_with_tokens(&self) -> SyntaxElementChildren { - SyntaxElementChildren(self.0.children_with_tokens()) - } - - pub fn first_token(&self) -> Option { - self.0.first_token().map(SyntaxToken) - } - - pub fn last_token(&self) -> Option { - self.0.last_token().map(SyntaxToken) - } - - pub fn ancestors(&self) -> impl Iterator { - successors(Some(self.clone()), |node| node.parent()) - } - - pub fn descendants(&self) -> impl Iterator { - self.preorder().filter_map(|event| match event { - WalkEvent::Enter(node) => Some(node), - WalkEvent::Leave(_) => None, - }) - } - - pub fn descendants_with_tokens(&self) -> impl Iterator { - 
self.preorder_with_tokens().filter_map(|event| match event { - WalkEvent::Enter(it) => Some(it), - WalkEvent::Leave(_) => None, - }) - } - - pub fn siblings(&self, direction: Direction) -> impl Iterator { - successors(Some(self.clone()), move |node| match direction { - Direction::Next => node.next_sibling(), - Direction::Prev => node.prev_sibling(), - }) - } - - pub fn siblings_with_tokens( - &self, - direction: Direction, - ) -> impl Iterator { - let me: SyntaxElement = self.clone().into(); - successors(Some(me), move |el| match direction { - Direction::Next => el.next_sibling_or_token(), - Direction::Prev => el.prev_sibling_or_token(), - }) - } +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum RustLanguage {} +impl Language for RustLanguage { + type Kind = SyntaxKind; - pub fn preorder(&self) -> impl Iterator> { - self.0.preorder().map(|event| match event { - WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)), - WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)), - }) + fn kind_from_raw(raw: rowan::cursor::SyntaxKind) -> SyntaxKind { + SyntaxKind::from(raw.0) } - pub fn preorder_with_tokens(&self) -> impl Iterator> { - self.0.preorder_with_tokens().map(|event| match event { - WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)), - WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)), - }) - } - - pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode { - self.0.replace_with(replacement) - } - - /// Adds specified children (tokens or nodes) to the current node at the - /// specific position. - /// - /// This is a type-unsafe low-level editing API, if you need to use it, - /// prefer to create a type-safe abstraction on top of it instead. - pub fn insert_children( - &self, - position: InsertPosition, - to_insert: impl Iterator, - ) -> SyntaxNode { - let mut delta = TextUnit::default(); - let to_insert = to_insert.map(|element| { - delta += element.text_len(); - to_green_element(element) - }); - - let old_children = self.0.green().children(); - - let new_children = match &position { - InsertPosition::First => { - to_insert.chain(old_children.iter().cloned()).collect::>() - } - InsertPosition::Last => { - old_children.iter().cloned().chain(to_insert).collect::>() - } - InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { - let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; - let split_at = self.position_of_child(anchor.clone()) + take_anchor; - let (before, after) = old_children.split_at(split_at); - before - .iter() - .cloned() - .chain(to_insert) - .chain(after.iter().cloned()) - .collect::>() - } - }; - - self.with_children(new_children) - } - - /// Replaces all nodes in `to_delete` with nodes from `to_insert` - /// - /// This is a type-unsafe low-level editing API, if you need to use it, - /// prefer to create a type-safe abstraction on top of it instead. 
- pub fn replace_children( - &self, - to_delete: RangeInclusive, - to_insert: impl Iterator, - ) -> SyntaxNode { - let start = self.position_of_child(to_delete.start().clone()); - let end = self.position_of_child(to_delete.end().clone()); - let old_children = self.0.green().children(); - - let new_children = old_children[..start] - .iter() - .cloned() - .chain(to_insert.map(to_green_element)) - .chain(old_children[end + 1..].iter().cloned()) - .collect::>(); - self.with_children(new_children) - } - - fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode { - let len = new_children.iter().map(|it| it.text_len()).sum::(); - let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children); - let new_file_node = self.replace_with(new_node); - let file = SourceFile::new(new_file_node); - - // FIXME: use a more elegant way to re-fetch the node (#1185), make - // `range` private afterwards - let mut ptr = SyntaxNodePtr::new(self); - ptr.range = TextRange::offset_len(ptr.range().start(), len); - ptr.to_node(file.syntax()).to_owned() - } - - fn position_of_child(&self, child: SyntaxElement) -> usize { - self.children_with_tokens() - .position(|it| it == child) - .expect("element is not a child of current element") + fn kind_to_raw(kind: SyntaxKind) -> rowan::cursor::SyntaxKind { + rowan::cursor::SyntaxKind(kind.into()) } } -fn to_green_element(element: SyntaxElement) -> rowan::GreenElement { - match element { - SyntaxElement::Node(node) => node.0.green().clone().into(), - SyntaxElement::Token(tok) => { - GreenToken::new(rowan::SyntaxKind(tok.kind() as u16), tok.text().clone()).into() - } - } -} - -#[derive(Clone, PartialEq, Eq, Hash)] -pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken); - -impl fmt::Debug for SyntaxToken { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "{:?}@{:?}", self.kind(), self.text_range())?; - if self.text().len() < 25 { - return write!(fmt, " {:?}", self.text()); - } - let text = self.text().as_str(); - for idx in 21..25 { - if text.is_char_boundary(idx) { - let text = format!("{} ...", &text[..idx]); - return write!(fmt, " {:?}", text); - } - } - unreachable!() - } -} +pub type SyntaxNode = rowan::SyntaxNode; +pub type SyntaxToken = rowan::SyntaxToken; +pub type SyntaxElement = rowan::NodeOrToken; +pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren; +pub type SyntaxElementChildren = rowan::SyntaxElementChildren; -impl fmt::Display for SyntaxToken { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(self.text(), fmt) - } -} - -impl SyntaxToken { - pub fn kind(&self) -> SyntaxKind { - self.0.kind().0.into() - } - - pub fn text(&self) -> &SmolStr { - self.0.text() - } - - pub fn text_range(&self) -> TextRange { - self.0.text_range() - } - - pub fn parent(&self) -> SyntaxNode { - SyntaxNode(self.0.parent()) - } - - pub fn next_sibling_or_token(&self) -> Option { - self.0.next_sibling_or_token().map(SyntaxElement::new) - } - - pub fn prev_sibling_or_token(&self) -> Option { - self.0.prev_sibling_or_token().map(SyntaxElement::new) - } - - pub fn siblings_with_tokens( - &self, - direction: Direction, - ) -> impl Iterator { - let me: SyntaxElement = self.clone().into(); - successors(Some(me), move |el| match direction { - Direction::Next => el.next_sibling_or_token(), - Direction::Prev => el.prev_sibling_or_token(), - }) - } - - pub fn next_token(&self) -> Option { - self.0.next_token().map(SyntaxToken) - } - - pub fn prev_token(&self) -> Option { - 
self.0.prev_token().map(SyntaxToken) - } - - pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode { - self.0.replace_with(new_token) - } -} - -#[derive(Debug, PartialEq, Eq, Hash, Clone)] -pub enum SyntaxElement { - Node(SyntaxNode), - Token(SyntaxToken), -} - -impl From for SyntaxElement { - fn from(node: SyntaxNode) -> Self { - SyntaxElement::Node(node) - } -} - -impl From for SyntaxElement { - fn from(token: SyntaxToken) -> Self { - SyntaxElement::Token(token) - } -} - -impl fmt::Display for SyntaxElement { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match self { - SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt), - SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt), - } - } -} - -impl SyntaxElement { - pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self { - match el { - rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)), - rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)), - } - } - - pub fn kind(&self) -> SyntaxKind { - match self { - SyntaxElement::Node(it) => it.kind(), - SyntaxElement::Token(it) => it.kind(), - } - } - - pub fn as_node(&self) -> Option<&SyntaxNode> { - match self { - SyntaxElement::Node(node) => Some(node), - SyntaxElement::Token(_) => None, - } - } - - pub fn into_node(self) -> Option { - match self { - SyntaxElement::Node(node) => Some(node), - SyntaxElement::Token(_) => None, - } - } - - pub fn as_token(&self) -> Option<&SyntaxToken> { - match self { - SyntaxElement::Node(_) => None, - SyntaxElement::Token(token) => Some(token), - } - } - - pub fn into_token(self) -> Option { - match self { - SyntaxElement::Node(_) => None, - SyntaxElement::Token(token) => Some(token), - } - } - - pub fn next_sibling_or_token(&self) -> Option { - match self { - SyntaxElement::Node(it) => it.next_sibling_or_token(), - SyntaxElement::Token(it) => it.next_sibling_or_token(), - } - } - - pub fn prev_sibling_or_token(&self) -> Option { - match self { - SyntaxElement::Node(it) => it.prev_sibling_or_token(), - SyntaxElement::Token(it) => it.prev_sibling_or_token(), - } - } - - pub fn ancestors(&self) -> impl Iterator { - match self { - SyntaxElement::Node(it) => it.clone(), - SyntaxElement::Token(it) => it.parent(), - } - .ancestors() - } - - pub fn text_range(&self) -> TextRange { - match self { - SyntaxElement::Node(it) => it.text_range(), - SyntaxElement::Token(it) => it.text_range(), - } - } - - fn text_len(&self) -> TextUnit { - match self { - SyntaxElement::Node(node) => node.0.green().text_len(), - SyntaxElement::Token(token) => TextUnit::of_str(token.0.text()), - } - } -} - -#[derive(Clone, Debug)] -pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren); - -impl Iterator for SyntaxNodeChildren { - type Item = SyntaxNode; - fn next(&mut self) -> Option { - self.0.next().map(SyntaxNode) - } -} - -#[derive(Clone, Debug)] -pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren); - -impl Iterator for SyntaxElementChildren { - type Item = SyntaxElement; - fn next(&mut self) -> Option { - self.0.next().map(SyntaxElement::new) - } -} +pub use rowan::{Direction, NodeOrToken}; pub struct SyntaxTreeBuilder { errors: Vec, @@ -507,19 +66,21 @@ impl SyntaxTreeBuilder { pub fn finish(self) -> Parse { let (green, errors) = self.finish_raw(); - let node = SyntaxNode::new(green); + let node = SyntaxNode::new_root(green); if cfg!(debug_assertions) { crate::validation::validate_block_structure(&node); } - Parse::new(node.0.green().clone(), errors) + 
Parse::new(node.green().clone(), errors) } pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { - self.inner.token(rowan::SyntaxKind(kind.into()), text) + let kind = RustLanguage::kind_to_raw(kind); + self.inner.token(kind, text) } pub fn start_node(&mut self, kind: SyntaxKind) { - self.inner.start_node(rowan::SyntaxKind(kind.into())) + let kind = RustLanguage::kind_to_raw(kind); + self.inner.start_node(kind) } pub fn finish_node(&mut self) { diff --git a/crates/ra_syntax/src/syntax_text.rs b/crates/ra_syntax/src/syntax_text.rs deleted file mode 100644 index 652cb7a1e..000000000 --- a/crates/ra_syntax/src/syntax_text.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::{ - fmt, - ops::{self, Bound}, -}; - -use crate::{SmolStr, SyntaxElement, SyntaxNode, TextRange, TextUnit}; - -#[derive(Clone)] -pub struct SyntaxText { - node: SyntaxNode, - range: TextRange, -} - -impl SyntaxText { - pub(crate) fn new(node: SyntaxNode) -> SyntaxText { - let range = node.text_range(); - SyntaxText { node, range } - } - - pub fn try_fold_chunks(&self, init: T, mut f: F) -> Result - where - F: FnMut(T, &str) -> Result, - { - self.node.descendants_with_tokens().try_fold(init, move |acc, element| { - let res = match element { - SyntaxElement::Token(token) => { - let range = match self.range.intersection(&token.text_range()) { - None => return Ok(acc), - Some(it) => it, - }; - let slice = if range == token.text_range() { - token.text() - } else { - let range = range - token.text_range().start(); - &token.text()[range] - }; - f(acc, slice)? - } - SyntaxElement::Node(_) => acc, - }; - Ok(res) - }) - } - - pub fn try_for_each_chunk Result<(), E>, E>( - &self, - mut f: F, - ) -> Result<(), E> { - self.try_fold_chunks((), move |(), chunk| f(chunk)) - } - - pub fn for_each_chunk(&self, mut f: F) { - enum Void {} - match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) { - Ok(()) => (), - Err(void) => match void {}, - } - } - - pub fn to_smol_string(&self) -> SmolStr { - self.to_string().into() - } - - pub fn contains_char(&self, c: char) -> bool { - self.try_for_each_chunk(|chunk| if chunk.contains(c) { Err(()) } else { Ok(()) }).is_err() - } - - pub fn find_char(&self, c: char) -> Option { - let mut acc: TextUnit = 0.into(); - let res = self.try_for_each_chunk(|chunk| { - if let Some(pos) = chunk.find(c) { - let pos: TextUnit = (pos as u32).into(); - return Err(acc + pos); - } - acc += TextUnit::of_str(chunk); - Ok(()) - }); - found(res) - } - - pub fn len(&self) -> TextUnit { - self.range.len() - } - - pub fn is_empty(&self) -> bool { - self.range.is_empty() - } - - pub fn slice(&self, range: impl ops::RangeBounds) -> SyntaxText { - let start = match range.start_bound() { - Bound::Included(&b) => b, - Bound::Excluded(_) => panic!("utf-aware slicing can't work this way"), - Bound::Unbounded => 0.into(), - }; - let end = match range.end_bound() { - Bound::Included(_) => panic!("utf-aware slicing can't work this way"), - Bound::Excluded(&b) => b, - Bound::Unbounded => self.len(), - }; - assert!(start <= end); - let len = end - start; - let start = self.range.start() + start; - let end = start + len; - assert!( - start <= end, - "invalid slice, range: {:?}, slice: {:?}", - self.range, - (range.start_bound(), range.end_bound()), - ); - let range = TextRange::from_to(start, end); - assert!( - range.is_subrange(&self.range), - "invalid slice, range: {:?}, slice: {:?}", - self.range, - range, - ); - SyntaxText { node: self.node.clone(), range } - } - - pub fn char_at(&self, offset: impl Into) -> Option { - let offset 
= offset.into(); - let mut start: TextUnit = 0.into(); - let res = self.try_for_each_chunk(|chunk| { - let end = start + TextUnit::of_str(chunk); - if start <= offset && offset < end { - let off: usize = u32::from(offset - start) as usize; - return Err(chunk[off..].chars().next().unwrap()); - } - start = end; - Ok(()) - }); - found(res) - } -} - -fn found(res: Result<(), T>) -> Option { - match res { - Ok(()) => None, - Err(it) => Some(it), - } -} - -impl fmt::Debug for SyntaxText { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self.to_string(), f) - } -} - -impl fmt::Display for SyntaxText { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f)) - } -} - -impl From for String { - fn from(text: SyntaxText) -> String { - text.to_string() - } -} - -impl PartialEq for SyntaxText { - fn eq(&self, mut rhs: &str) -> bool { - self.try_for_each_chunk(|chunk| { - if !rhs.starts_with(chunk) { - return Err(()); - } - rhs = &rhs[chunk.len()..]; - Ok(()) - }) - .is_ok() - } -} - -impl PartialEq<&'_ str> for SyntaxText { - fn eq(&self, rhs: &&str) -> bool { - self == *rhs - } -} -- cgit v1.2.3 From 62be91b82d6368a20a40893b199bc4f7a35a2223 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sun, 21 Jul 2019 13:08:32 +0300 Subject: minor, move type --- crates/ra_syntax/src/algo.rs | 12 ++++++++++-- crates/ra_syntax/src/lib.rs | 5 +++-- crates/ra_syntax/src/syntax_node.rs | 8 -------- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index 6bb46b021..ecd42c133 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs @@ -5,8 +5,8 @@ use std::ops::RangeInclusive; use itertools::Itertools; use crate::{ - AstNode, Direction, InsertPosition, NodeOrToken, SourceFile, SyntaxElement, SyntaxNode, - SyntaxNodePtr, SyntaxToken, TextRange, TextUnit, + AstNode, Direction, NodeOrToken, SourceFile, SyntaxElement, SyntaxNode, SyntaxNodePtr, + SyntaxToken, TextRange, TextUnit, }; pub use rowan::TokenAtOffset; @@ -61,6 +61,14 @@ pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxEleme root.covering_element(range) } +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum InsertPosition { + First, + Last, + Before(T), + After(T), +} + /// Adds specified children (tokens or nodes) to the current node at the /// specific position. 
/// diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 21c07d69a..7b778f38c 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -37,13 +37,14 @@ use ra_text_edit::AtomTextEdit; use crate::syntax_node::GreenNode; pub use crate::{ + algo::InsertPosition, ast::{AstNode, AstToken}, parsing::{classify_literal, tokenize, Token}, ptr::{AstPtr, SyntaxNodePtr}, syntax_error::{Location, SyntaxError, SyntaxErrorKind}, syntax_node::{ - Direction, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, - SyntaxTreeBuilder, WalkEvent, + Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, + WalkEvent, }, }; pub use ra_parser::SyntaxKind; diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index 689dbefde..95795a27a 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs @@ -17,14 +17,6 @@ use crate::{ pub use rowan::WalkEvent; pub(crate) use rowan::{GreenNode, GreenToken}; -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum InsertPosition { - First, - Last, - Before(T), - After(T), -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum RustLanguage {} impl Language for RustLanguage { -- cgit v1.2.3 From d52ee59a712932bc381d8c690dc2f681598760fe Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sun, 21 Jul 2019 13:28:58 +0300 Subject: streamline API --- crates/ra_assists/src/assist_ctx.rs | 5 +++-- crates/ra_hir/src/ty/tests.rs | 6 ++---- crates/ra_ide_api/src/completion/completion_context.rs | 4 ++-- crates/ra_ide_api/src/extend_selection.rs | 6 +++--- crates/ra_ide_api/src/goto_type_definition.rs | 4 ++-- crates/ra_ide_api/src/matching_brace.rs | 6 ++++-- crates/ra_ide_api/src/typing.rs | 15 ++++++++++----- crates/ra_syntax/src/algo.rs | 17 +++++------------ crates/ra_syntax/src/lib.rs | 6 ++---- crates/ra_syntax/src/syntax_node.rs | 1 - 10 files changed, 33 insertions(+), 37 deletions(-) diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs index 4d5a76de6..a12c3ed54 100644 --- a/crates/ra_assists/src/assist_ctx.rs +++ b/crates/ra_assists/src/assist_ctx.rs @@ -2,8 +2,9 @@ use hir::db::HirDatabase; use ra_db::FileRange; use ra_fmt::{leading_indent, reindent}; use ra_syntax::{ - algo::{find_covering_element, find_node_at_offset, find_token_at_offset, TokenAtOffset}, + algo::{find_covering_element, find_node_at_offset}, AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit, + TokenAtOffset, }; use ra_text_edit::TextEditBuilder; @@ -105,7 +106,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { } pub(crate) fn token_at_offset(&self) -> TokenAtOffset { - find_token_at_offset(self.source_file.syntax(), self.frange.range.start()) + self.source_file.syntax().token_at_offset(self.frange.range.start()) } pub(crate) fn node_at_offset(&self) -> Option { diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 706500484..676711d0a 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -3211,8 +3211,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { ); { let file = db.parse(pos.file_id).ok().unwrap(); - let node = - algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); + let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent(); let events = db.log_executed(|| { SourceAnalyzer::new(&db, pos.file_id, &node, None); }); @@ -3232,8 
+3231,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { { let file = db.parse(pos.file_id).ok().unwrap(); - let node = - algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); + let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent(); let events = db.log_executed(|| { SourceAnalyzer::new(&db, pos.file_id, &node, None); }); diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs index 2f78d5409..968f5694b 100644 --- a/crates/ra_ide_api/src/completion/completion_context.rs +++ b/crates/ra_ide_api/src/completion/completion_context.rs @@ -1,6 +1,6 @@ use hir::source_binder; use ra_syntax::{ - algo::{find_covering_element, find_node_at_offset, find_token_at_offset}, + algo::{find_covering_element, find_node_at_offset}, ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextUnit, @@ -48,7 +48,7 @@ impl<'a> CompletionContext<'a> { ) -> Option> { let module = source_binder::module_from_position(db, position); let token = - find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?; + original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?; let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset)); let mut ctx = CompletionContext { diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index f78c562af..edbf622c1 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs @@ -1,10 +1,10 @@ use ra_db::SourceDatabase; use ra_syntax::{ - algo::{find_covering_element, find_token_at_offset, TokenAtOffset}, + algo::find_covering_element, ast::{self, AstNode, AstToken}, Direction, NodeOrToken, SyntaxKind::*, - SyntaxNode, SyntaxToken, TextRange, TextUnit, T, + SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, }; use crate::{db::RootDatabase, FileRange}; @@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option Option>> { let parse = db.parse(position.file_id); - let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| { + let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| { token .parent() .ancestors() diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs index 1e2fac848..e802d01e4 100644 --- a/crates/ra_ide_api/src/matching_brace.rs +++ b/crates/ra_ide_api/src/matching_brace.rs @@ -1,9 +1,11 @@ -use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T}; +use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T}; pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { const BRACES: &[SyntaxKind] = &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]]; - let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset) + let (brace_node, brace_idx) = file + .syntax() + .token_at_offset(offset) .filter_map(|node| { let idx = BRACES.iter().position(|&brace| brace == node.kind())?; Some((node, idx)) diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs index 5a1cbcc49..6b3fd5904 100644 --- a/crates/ra_ide_api/src/typing.rs +++ b/crates/ra_ide_api/src/typing.rs @@ -1,11 +1,11 @@ use ra_db::{FilePosition, SourceDatabase}; use ra_fmt::leading_indent; use ra_syntax::{ - 
algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset}, + algo::find_node_at_offset, ast::{self, AstToken}, AstNode, SmolStr, SourceFile, SyntaxKind::*, - SyntaxToken, TextRange, TextUnit, + SyntaxToken, TextRange, TextUnit, TokenAtOffset, }; use ra_text_edit::{TextEdit, TextEditBuilder}; @@ -14,7 +14,9 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option { let parse = db.parse(position.file_id); let file = parse.tree(); - let comment = find_token_at_offset(file.syntax(), position.offset) + let comment = file + .syntax() + .token_at_offset(position.offset) .left_biased() .and_then(ast::Comment::cast)?; @@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option Option { - let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) { + let ws = match file.syntax().token_at_offset(token.text_range().start()) { TokenAtOffset::Between(l, r) => { assert!(r == *token); l @@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option< let parse = db.parse(position.file_id); assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.')); - let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset) + let whitespace = parse + .tree() + .syntax() + .token_at_offset(position.offset) .left_biased() .and_then(ast::Whitespace::cast)?; diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index ecd42c133..45f624810 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs @@ -5,16 +5,9 @@ use std::ops::RangeInclusive; use itertools::Itertools; use crate::{ - AstNode, Direction, NodeOrToken, SourceFile, SyntaxElement, SyntaxNode, SyntaxNodePtr, - SyntaxToken, TextRange, TextUnit, + AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit, }; -pub use rowan::TokenAtOffset; - -pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset { - node.token_at_offset(offset) -} - /// Returns ancestors of the node at the offset, sorted by length. This should /// do the right thing at an edge, e.g. 
when searching for expressions at `{ /// <|>foo }` we will get the name reference instead of the whole block, which @@ -24,7 +17,7 @@ pub fn ancestors_at_offset( node: &SyntaxNode, offset: TextUnit, ) -> impl Iterator { - find_token_at_offset(node, offset) + node.token_at_offset(offset) .map(|token| token.parent().ancestors()) .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) } @@ -137,14 +130,14 @@ fn with_children( let len = new_children.iter().map(|it| it.text_len()).sum::(); let new_node = rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children); - let new_file_node = parent.replace_with(new_node); - let file = SourceFile::new(new_file_node); + let new_root_node = parent.replace_with(new_node); + let new_root_node = SyntaxNode::new_root(new_root_node); // FIXME: use a more elegant way to re-fetch the node (#1185), make // `range` private afterwards let mut ptr = SyntaxNodePtr::new(parent); ptr.range = TextRange::offset_len(ptr.range().start(), len); - ptr.to_node(file.syntax()).to_owned() + ptr.to_node(&new_root_node) } fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize { diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 7b778f38c..d02078256 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -44,12 +44,10 @@ pub use crate::{ syntax_error::{Location, SyntaxError, SyntaxErrorKind}, syntax_node::{ Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, - WalkEvent, }, }; -pub use ra_parser::SyntaxKind; -pub use ra_parser::T; -pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit}; +pub use ra_parser::{SyntaxKind, T}; +pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit, TokenAtOffset, WalkEvent}; /// `Parse` is the result of the parsing: a syntax tree and a collection of /// errors. 
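A minimal call-site sketch of the streamlined lookup this commit introduces (not part of the patch): the free function `algo::find_token_at_offset(node, offset)` is removed, call sites invoke the `token_at_offset` method on `SyntaxNode` directly, and `TokenAtOffset` is re-exported from the crate root. The helper `comment_at` below is hypothetical and exists only to show the shape of the new API.

    use ra_syntax::{
        ast::{self, AstNode, AstToken},
        SourceFile, TextUnit, TokenAtOffset,
    };

    fn comment_at(file: &SourceFile, offset: TextUnit) -> Option<ast::Comment> {
        // Old style (removed above): algo::find_token_at_offset(file.syntax(), offset)
        match file.syntax().token_at_offset(offset) {
            TokenAtOffset::None => None,
            TokenAtOffset::Single(token) => ast::Comment::cast(token),
            // `.left_biased()` would pick `left` here; all three variants of the
            // rowan enum are matched explicitly for illustration.
            TokenAtOffset::Between(left, _right) => ast::Comment::cast(left),
        }
    }

In practice most call sites in this commit keep the iterator-style helpers (`left_biased`, `right_biased`, `filter_map`) rather than matching the enum, as the typing.rs and matching_brace.rs hunks show.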
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index 95795a27a..b2f5b8c64 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs @@ -14,7 +14,6 @@ use crate::{ Parse, SmolStr, SyntaxKind, TextUnit, }; -pub use rowan::WalkEvent; pub(crate) use rowan::{GreenNode, GreenToken}; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -- cgit v1.2.3 From 773ad2edb3b84bf20378a577bc4cd808384de078 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sun, 21 Jul 2019 13:34:15 +0300 Subject: simiplify --- crates/ra_syntax/src/lib.rs | 15 +++++++-------- crates/ra_syntax/src/validation.rs | 6 +++--- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index d02078256..7f69b86e1 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -143,18 +143,17 @@ impl Parse { pub use crate::ast::SourceFile; impl SourceFile { - fn new(green: GreenNode) -> SourceFile { - let root = SyntaxNode::new_root(green); + pub fn parse(text: &str) -> Parse { + let (green, mut errors) = parsing::parse_text(text); + let root = SyntaxNode::new_root(green.clone()); + if cfg!(debug_assertions) { validation::validate_block_structure(&root); } - assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); - SourceFile::cast(root).unwrap() - } - pub fn parse(text: &str) -> Parse { - let (green, mut errors) = parsing::parse_text(text); - errors.extend(validation::validate(&SourceFile::new(green.clone()))); + errors.extend(validation::validate(&root)); + + assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); Parse { green, errors: Arc::new(errors), _ty: PhantomData } } } diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 19bdafef2..e03c02d1b 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs @@ -5,16 +5,16 @@ mod field_expr; use crate::{ algo::visit::{visitor_ctx, VisitorCtx}, - ast, AstNode, SourceFile, SyntaxError, + ast, SyntaxError, SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING}, SyntaxNode, TextUnit, T, }; pub(crate) use unescape::EscapeError; -pub(crate) fn validate(file: &SourceFile) -> Vec { +pub(crate) fn validate(root: &SyntaxNode) -> Vec { let mut errors = Vec::new(); - for node in file.syntax().descendants() { + for node in root.descendants() { let _ = visitor_ctx(&mut errors) .visit::(validate_literal) .visit::(block::validate_block_node) -- cgit v1.2.3 From 5fe19d2fbd2daa05b2cd3b1ebb6fa926e9d86c36 Mon Sep 17 00:00:00 2001 From: Ekaterina Babshukova Date: Sun, 21 Jul 2019 14:11:45 +0300 Subject: provide completion in struct patterns --- crates/ra_hir/src/source_binder.rs | 9 ++- crates/ra_hir/src/ty.rs | 2 +- crates/ra_hir/src/ty/infer.rs | 22 +++-- crates/ra_ide_api/src/completion.rs | 2 + .../src/completion/complete_struct_literal.rs | 15 ++-- .../src/completion/complete_struct_pattern.rs | 94 ++++++++++++++++++++++ .../src/completion/completion_context.rs | 11 ++- 7 files changed, 134 insertions(+), 21 deletions(-) create mode 100644 crates/ra_ide_api/src/completion/complete_struct_pattern.rs diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 4c173a4f7..fc9bc33d2 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs @@ -266,9 +266,14 @@ impl SourceAnalyzer { self.infer.as_ref()?.field_resolution(expr_id) } - pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option { + pub fn 
resolve_struct_literal(&self, struct_lit: &ast::StructLit) -> Option { let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?; - self.infer.as_ref()?.variant_resolution(expr_id) + self.infer.as_ref()?.variant_resolution_for_expr(expr_id) + } + + pub fn resolve_struct_pattern(&self, struct_pat: &ast::StructPat) -> Option { + let pat_id = self.body_source_map.as_ref()?.node_pat(&struct_pat.clone().into())?; + self.infer.as_ref()?.variant_resolution_for_pat(pat_id) } pub fn resolve_macro_call( diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index 4cf714f5d..82589e504 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs @@ -472,7 +472,7 @@ impl Ty { /// Returns the type parameters of this type if it has some (i.e. is an ADT /// or function); so if `self` is `Option`, this returns the `u32`. - fn substs(&self) -> Option { + pub fn substs(&self) -> Option { match self { Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()), _ => None, diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs index a82dff711..594c5bc79 100644 --- a/crates/ra_hir/src/ty/infer.rs +++ b/crates/ra_hir/src/ty/infer.rs @@ -113,7 +113,8 @@ pub struct InferenceResult { method_resolutions: FxHashMap, /// For each field access expr, records the field it resolves to. field_resolutions: FxHashMap, - variant_resolutions: FxHashMap, + /// For each struct literal, records the variant it resolves to. + variant_resolutions: FxHashMap, /// For each associated item record what it resolves to assoc_resolutions: FxHashMap, diagnostics: Vec, @@ -128,8 +129,11 @@ impl InferenceResult { pub fn field_resolution(&self, expr: ExprId) -> Option { self.field_resolutions.get(&expr).copied() } - pub fn variant_resolution(&self, expr: ExprId) -> Option { - self.variant_resolutions.get(&expr).copied() + pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option { + self.variant_resolutions.get(&id.into()).copied() + } + pub fn variant_resolution_for_pat(&self, id: PatId) -> Option { + self.variant_resolutions.get(&id.into()).copied() } pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option { self.assoc_resolutions.get(&id.into()).copied() @@ -218,8 +222,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { self.result.field_resolutions.insert(expr, field); } - fn write_variant_resolution(&mut self, expr: ExprId, variant: VariantDef) { - self.result.variant_resolutions.insert(expr, variant); + fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantDef) { + self.result.variant_resolutions.insert(id, variant); } fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: ImplItem) { @@ -678,8 +682,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { subpats: &[FieldPat], expected: &Ty, default_bm: BindingMode, + id: PatId, ) -> Ty { let (ty, def) = self.resolve_variant(path); + if let Some(variant) = def { + self.write_variant_resolution(id.into(), variant); + } self.unify(&ty, expected); @@ -762,7 +770,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm) } Pat::Struct { path: ref p, args: ref fields } => { - self.infer_struct_pat(p.as_ref(), fields, expected, default_bm) + self.infer_struct_pat(p.as_ref(), fields, expected, default_bm, pat) } Pat::Path(path) => { // FIXME use correct resolver for the surrounding expression @@ -1064,7 +1072,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { Expr::StructLit { path, fields, spread } => { let (ty, 
def_id) = self.resolve_variant(path.as_ref()); if let Some(variant) = def_id { - self.write_variant_resolution(tgt_expr, variant); + self.write_variant_resolution(tgt_expr.into(), variant); } let substs = ty.substs().unwrap_or_else(Substs::empty); diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs index c23b5da59..85160358a 100644 --- a/crates/ra_ide_api/src/completion.rs +++ b/crates/ra_ide_api/src/completion.rs @@ -4,6 +4,7 @@ mod presentation; mod complete_dot; mod complete_struct_literal; +mod complete_struct_pattern; mod complete_pattern; mod complete_fn_param; mod complete_keyword; @@ -65,6 +66,7 @@ pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Opti complete_scope::complete_scope(&mut acc, &ctx); complete_dot::complete_dot(&mut acc, &ctx); complete_struct_literal::complete_struct_literal(&mut acc, &ctx); + complete_struct_pattern::complete_struct_pattern(&mut acc, &ctx); complete_pattern::complete_pattern(&mut acc, &ctx); complete_postfix::complete_postfix(&mut acc, &ctx); Some(acc) diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs index 9410f740f..6aa41f498 100644 --- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs +++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs @@ -1,23 +1,22 @@ -use hir::{Substs, Ty}; +use hir::Substs; use crate::completion::{CompletionContext, Completions}; /// Complete fields in fields literals. pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) { let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| { - Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?)) + Some(( + ctx.analyzer.type_of(ctx.db, &it.clone().into())?, + ctx.analyzer.resolve_struct_literal(it)?, + )) }) { Some(it) => it, _ => return, }; - - let ty_substs = match ty { - Ty::Apply(it) => it.parameters, - _ => Substs::empty(), - }; + let substs = &ty.substs().unwrap_or_else(Substs::empty); for field in variant.fields(ctx.db) { - acc.add_field(ctx, field, &ty_substs); + acc.add_field(ctx, field, substs); } } diff --git a/crates/ra_ide_api/src/completion/complete_struct_pattern.rs b/crates/ra_ide_api/src/completion/complete_struct_pattern.rs new file mode 100644 index 000000000..d0dde5930 --- /dev/null +++ b/crates/ra_ide_api/src/completion/complete_struct_pattern.rs @@ -0,0 +1,94 @@ +use hir::Substs; + +use crate::completion::{CompletionContext, Completions}; + +pub(super) fn complete_struct_pattern(acc: &mut Completions, ctx: &CompletionContext) { + let (ty, variant) = match ctx.struct_lit_pat.as_ref().and_then(|it| { + Some(( + ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?, + ctx.analyzer.resolve_struct_pattern(it)?, + )) + }) { + Some(it) => it, + _ => return, + }; + let substs = &ty.substs().unwrap_or_else(Substs::empty); + + for field in variant.fields(ctx.db) { + acc.add_field(ctx, field, substs); + } +} + +#[cfg(test)] +mod tests { + use crate::completion::{do_completion, CompletionItem, CompletionKind}; + use insta::assert_debug_snapshot_matches; + + fn complete(code: &str) -> Vec { + do_completion(code, CompletionKind::Reference) + } + + #[test] + fn test_struct_pattern_field() { + let completions = complete( + r" + struct S { foo: u32 } + + fn process(f: S) { + match f { + S { f<|>: 92 } => (), + } + } + ", + ); + assert_debug_snapshot_matches!(completions, @r###" + ⋮[ + ⋮ CompletionItem { + ⋮ label: "foo", + ⋮ 
source_range: [117; 118), + ⋮ delete: [117; 118), + ⋮ insert: "foo", + ⋮ kind: Field, + ⋮ detail: "u32", + ⋮ }, + ⋮] + "###); + } + + #[test] + fn test_struct_pattern_enum_variant() { + let completions = complete( + r" + enum E { + S { foo: u32, bar: () } + } + + fn process(e: E) { + match e { + E::S { <|> } => (), + } + } + ", + ); + assert_debug_snapshot_matches!(completions, @r###" + ⋮[ + ⋮ CompletionItem { + ⋮ label: "bar", + ⋮ source_range: [161; 161), + ⋮ delete: [161; 161), + ⋮ insert: "bar", + ⋮ kind: Field, + ⋮ detail: "()", + ⋮ }, + ⋮ CompletionItem { + ⋮ label: "foo", + ⋮ source_range: [161; 161), + ⋮ delete: [161; 161), + ⋮ insert: "foo", + ⋮ kind: Field, + ⋮ detail: "u32", + ⋮ }, + ⋮] + "###); + } +} diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs index 2f78d5409..6fee7b5be 100644 --- a/crates/ra_ide_api/src/completion/completion_context.rs +++ b/crates/ra_ide_api/src/completion/completion_context.rs @@ -21,6 +21,7 @@ pub(crate) struct CompletionContext<'a> { pub(super) function_syntax: Option, pub(super) use_item_syntax: Option, pub(super) struct_lit_syntax: Option, + pub(super) struct_lit_pat: Option, pub(super) is_param: bool, /// If a name-binding or reference to a const in a pattern. /// Irrefutable patterns (like let) are excluded. @@ -60,6 +61,7 @@ impl<'a> CompletionContext<'a> { function_syntax: None, use_item_syntax: None, struct_lit_syntax: None, + struct_lit_pat: None, is_param: false, is_pat_binding: false, is_trivial_path: false, @@ -106,8 +108,7 @@ impl<'a> CompletionContext<'a> { // Otherwise, see if this is a declaration. We can use heuristics to // suggest declaration names, see `CompletionKind::Magic`. if let Some(name) = find_node_at_offset::(file.syntax(), offset) { - if is_node::(name.syntax()) { - let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap(); + if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { let parent = bind_pat.syntax().parent(); if parent.clone().and_then(ast::MatchArm::cast).is_some() || parent.and_then(ast::Condition::cast).is_some() @@ -119,6 +120,10 @@ impl<'a> CompletionContext<'a> { self.is_param = true; return; } + if name.syntax().ancestors().find_map(ast::FieldPatList::cast).is_some() { + self.struct_lit_pat = + find_node_at_offset(original_parse.tree().syntax(), self.offset); + } } } @@ -235,7 +240,7 @@ fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Op } fn is_node(node: &SyntaxNode) -> bool { - match node.ancestors().filter_map(N::cast).next() { + match node.ancestors().find_map(N::cast) { None => false, Some(n) => n.syntax().text_range() == node.text_range(), } -- cgit v1.2.3 From d690249bc81bc265cb3d1836c2922325f4fdb8af Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sun, 21 Jul 2019 19:08:05 +0300 Subject: Remove obsolete keybinding --- editors/code/package.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/editors/code/package.json b/editors/code/package.json index 2ed321069..ea74ab85a 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -140,11 +140,6 @@ "key": "ctrl+shift+m", "when": "editorTextFocus && editorLangId == rust" }, - { - "command": "rust-analyzer.extendSelection", - "key": "shift+alt+right", - "when": "editorTextFocus && editorLangId == rust" - }, { "command": "rust-analyzer.joinLines", "key": "ctrl+shift+j", -- cgit v1.2.3
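The struct-pattern completion commit above splits `SourceAnalyzer::resolve_variant` into two entry points. A small sketch of how callers are expected to use them (not part of the patch), assuming `SourceAnalyzer`, `VariantDef`, and the `ast` node types are re-exported as the diffs suggest; the two wrapper functions are hypothetical and only illustrate the signatures.

    use hir::{SourceAnalyzer, VariantDef};
    use ra_syntax::ast;

    // Formerly `resolve_variant`; now named after the expression form it handles.
    fn struct_literal_target(analyzer: &SourceAnalyzer, lit: &ast::StructLit) -> Option<VariantDef> {
        analyzer.resolve_struct_literal(lit)
    }

    // New in this series: struct patterns get their own resolution, backed by
    // `variant_resolution_for_pat` in the inference result.
    fn struct_pattern_target(analyzer: &SourceAnalyzer, pat: &ast::StructPat) -> Option<VariantDef> {
        analyzer.resolve_struct_pattern(pat)
    }

With the pattern-side resolution in place, `complete_struct_pattern` can list the resolved variant's fields exactly as `complete_struct_literal` does for literals, which is what the new snapshot tests above exercise.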