 29 files changed, 249 insertions(+), 783 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 141cc6088..8feaf27ec 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -264,11 +264,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "colosseum"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
 name = "console"
 version = "0.7.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -485,6 +480,11 @@ dependencies = [
 ]
 
 [[package]]
+name = "format-buf"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "fs_extra"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1124,6 +1124,7 @@ name = "ra_assists"
 version = "0.1.0"
 dependencies = [
  "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "once_cell 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1211,6 +1212,7 @@ dependencies = [
 name = "ra_ide_api"
 version = "0.1.0"
 dependencies = [
+ "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1313,7 +1315,7 @@ dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
- "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "test_utils 0.1.0",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1584,11 +1586,9 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.5.6"
+version = "0.6.0-pre.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2150,7 +2150,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
 "checksum clicolors-control 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "73abfd4c73d003a674ce5d2933fca6ce6c42480ea84a5ffe0a2dc39ed56300f9"
 "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
-"checksum colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "370c83b49aedf022ee27942e8ae1d9de1cf40dc9653ee6550e4455d08f6406f9"
 "checksum console 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8ca57c2c14b8a2bf3105bc9d15574aad80babf6a9c44b1058034cdf8bd169628"
 "checksum cpuprofiler 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "33f07976bb6821459632d7a18d97ccca005cb5c552f251f822c7c1781c1d7035"
 "checksum crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b"
@@ -2177,6 +2176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum filetime 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2f8c63033fcba1f51ef744505b3cad42510432b904c062afa67ad7ece008429d"
 "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33"
 "checksum flexi_logger 0.13.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d3c4470d1ff8446baa0c13202646722886dde8dc4c5d33cb8242d70ece79d5"
+"checksum format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aea5a5909a74969507051a3b17adc84737e31a5f910559892aedce026f4d53"
 "checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
 "checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6"
 "checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0"
@@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
 "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
 "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
-"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be"
+"checksum rowan 0.6.0-pre.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0eeee40f1a2724b7d0d9fa5f73a7804cd2f4c91b37ba9f785d429f31819d60df"
 "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
 "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml
index 5ddac1e48..2113286a3 100644
--- a/crates/ra_assists/Cargo.toml
+++ b/crates/ra_assists/Cargo.toml
@@ -5,6 +5,7 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
 [dependencies]
+format-buf = "1.0.0"
 once_cell = "0.2.0"
 join_to_string = "0.1.3"
 itertools = "0.8.0"
diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs
index 59ca88468..4b61f4031 100644
--- a/crates/ra_assists/src/add_impl.rs
+++ b/crates/ra_assists/src/add_impl.rs
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use hir::db::HirDatabase;
 use join_to_string::join;
 use ra_syntax::{
@@ -19,7 +18,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let mut buf = String::new();
         buf.push_str("\n\nimpl");
         if let Some(type_params) = &type_params {
-            write!(buf, "{}", type_params.syntax()).unwrap();
+            format!(buf, "{}", type_params.syntax());
         }
         buf.push_str(" ");
         buf.push_str(name.text().as_str());
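Note: the `format-buf` dependency added above provides a `format!` macro that appends into an existing buffer, which is why the `write!(...).unwrap()` calls disappear throughout this commit. A minimal sketch of the pattern, outside any real module of the patch (the `render_impl_header` name and inputs are made up for illustration):

    // `format_buf::format!` writes into the buffer in place; there is no
    // `fmt::Result` left over for the caller to unwrap, unlike `write!`
    // from `std::fmt::Write`.
    use format_buf::format;

    fn render_impl_header(name: &str, type_params: Option<&str>) -> String {
        let mut buf = String::from("\n\nimpl");
        if let Some(params) = type_params {
            format!(buf, "{}", params); // e.g. appends "<T: Clone>"
        }
        format!(buf, " {} ", name);
        buf
    }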
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 4d5a76de6..a12c3ed54 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -2,8 +2,9 @@ use hir::db::HirDatabase;
 use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::{find_covering_element, find_node_at_offset},
     AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    TokenAtOffset,
 };
 use ra_text_edit::TextEditBuilder;
 
@@ -105,7 +106,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
     }
 
     pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
-        find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
+        self.source_file.syntax().token_at_offset(self.frange.range.start())
     }
 
     pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
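Note: across the patch, the crate-local `algo::find_token_at_offset` helper is replaced by rowan's inherent `SyntaxNode::token_at_offset`, which yields the same `TokenAtOffset` value (now re-exported from `rowan` via `ra_syntax`). A small sketch of how that value is typically consumed, using only API visible in this diff; the sample text and offset are made up:

    use ra_syntax::{SourceFile, TextUnit, TokenAtOffset};

    fn demo() {
        let parse = SourceFile::parse("fn main() { 1 + 1; }");
        let file = parse.tree();
        // Offset 3 sits on the boundary between the whitespace and `main`.
        let offset = TextUnit::from(3);

        // Zero, one, or two tokens can touch a given offset.
        match file.syntax().token_at_offset(offset) {
            TokenAtOffset::None => println!("offset outside the tree"),
            TokenAtOffset::Single(tok) => println!("inside {:?}", tok.kind()),
            TokenAtOffset::Between(left, right) => {
                println!("between {:?} and {:?}", left.kind(), right.kind())
            }
        }

        // Or pick one side, as `on_enter`/`on_dot_typed` do later in the patch.
        let _leftmost = file.syntax().token_at_offset(offset).left_biased();
    }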
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index ab6c347ad..95b871b30 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -4,7 +4,10 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
+    algo::{insert_children, replace_children},
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement,
+    SyntaxKind::*,
+    T,
 };
 use ra_text_edit::TextEditBuilder;
 
@@ -38,7 +41,7 @@ impl<N: AstNode> AstEditor<N> {
         position: InsertPosition<SyntaxElement>,
         to_insert: impl Iterator<Item = SyntaxElement>,
     ) -> N {
-        let new_syntax = self.ast().syntax().insert_children(position, to_insert);
+        let new_syntax = insert_children(self.ast().syntax(), position, to_insert);
         N::cast(new_syntax).unwrap()
     }
 
@@ -48,7 +51,7 @@ impl<N: AstNode> AstEditor<N> {
         to_delete: RangeInclusive<SyntaxElement>,
         to_insert: impl Iterator<Item = SyntaxElement>,
     ) -> N {
-        let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
+        let new_syntax = replace_children(self.ast().syntax(), to_delete, to_insert);
         N::cast(new_syntax).unwrap()
     }
 
diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs
index 911de2d48..5eb708310 100644
--- a/crates/ra_assists/src/introduce_variable.rs
+++ b/crates/ra_assists/src/introduce_variable.rs
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use hir::db::HirDatabase;
 use ra_syntax::{
     ast::{self, AstNode},
@@ -37,7 +36,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
             buf.push_str("let var_name = ");
             TextUnit::of_str("let ")
         };
-        write!(buf, "{}", expr.syntax()).unwrap();
+        format!(buf, "{}", expr.syntax());
         let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
         let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
             Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())
diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs
index 0f3cdbe53..127c9e068 100644
--- a/crates/ra_assists/src/move_guard.rs
+++ b/crates/ra_assists/src/move_guard.rs
@@ -2,7 +2,7 @@ use hir::db::HirDatabase;
 use ra_syntax::{
     ast,
     ast::{AstNode, AstToken, IfExpr, MatchArm},
-    SyntaxElement, TextUnit,
+    TextUnit,
 };
 
 use crate::{Assist, AssistCtx, AssistId};
@@ -18,10 +18,10 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
 
     ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
         edit.target(guard.syntax().text_range());
-        let offseting_amount = match &space_before_guard {
-            Some(SyntaxElement::Token(tok)) => {
+        let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) {
+            Some(tok) => {
                 if let Some(_) = ast::Whitespace::cast(tok.clone()) {
-                    let ele = space_before_guard.unwrap().text_range();
+                    let ele = tok.text_range();
                     edit.delete(ele);
                     ele.len()
                 } else {
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index 706500484..676711d0a 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -3211,8 +3211,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     );
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
@@ -3232,8 +3231,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
 
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml
index c49a05de1..78a3db14d 100644
--- a/crates/ra_ide_api/Cargo.toml
+++ b/crates/ra_ide_api/Cargo.toml
@@ -5,6 +5,7 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
 [dependencies]
+format-buf = "1.0.0"
 itertools = "0.8.0"
 join_to_string = "0.1.3"
 log = "0.4.5"
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index 6fee7b5be..dfaa9ce69 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -1,6 +1,6 @@
 use hir::source_binder;
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset},
+    algo::{find_covering_element, find_node_at_offset},
     ast, AstNode, Parse, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
@@ -49,7 +49,7 @@ impl<'a> CompletionContext<'a> {
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
         let token =
-            find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
+            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
diff --git a/crates/ra_ide_api/src/display/short_label.rs b/crates/ra_ide_api/src/display/short_label.rs
index be499e485..825a033ee 100644
--- a/crates/ra_ide_api/src/display/short_label.rs
+++ b/crates/ra_ide_api/src/display/short_label.rs
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner};
 
 pub(crate) trait ShortLabel {
@@ -73,7 +72,7 @@ where
     let mut buf = short_label_from_node(node, prefix)?;
 
     if let Some(type_ref) = node.ascribed_type() {
-        write!(buf, ": {}", type_ref.syntax()).unwrap();
+        format!(buf, ": {}", type_ref.syntax());
     }
 
     Some(buf)
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs
index 140820df6..edbf622c1 100644
--- a/crates/ra_ide_api/src/extend_selection.rs
+++ b/crates/ra_ide_api/src/extend_selection.rs
@@ -1,10 +1,10 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    algo::find_covering_element,
     ast::{self, AstNode, AstToken},
-    Direction, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
 use crate::{db::RootDatabase, FileRange};
@@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 
     if range.is_empty() {
         let offset = range.start();
-        let mut leaves = find_token_at_offset(root, offset);
+        let mut leaves = root.token_at_offset(offset);
         if leaves.clone().all(|it| it.kind() == WHITESPACE) {
             return Some(extend_ws(root, leaves.next()?, offset));
         }
@@ -53,7 +53,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         return Some(leaf_range);
     };
     let node = match find_covering_element(root, range) {
-        SyntaxElement::Token(token) => {
+        NodeOrToken::Token(token) => {
            if token.text_range() != range {
                return Some(token.text_range());
            }
@@ -64,7 +64,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
            }
            token.parent()
        }
-        SyntaxElement::Node(node) => node,
+        NodeOrToken::Node(node) => node,
    };
    if node.text_range() != range {
        return Some(node.text_range());
@@ -153,8 +153,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
    node.siblings_with_tokens(dir)
        .skip(1)
        .skip_while(|node| match node {
-            SyntaxElement::Node(_) => false,
-            SyntaxElement::Token(it) => is_single_line_ws(it),
+            NodeOrToken::Node(_) => false,
+            NodeOrToken::Token(it) => is_single_line_ws(it),
        })
        .next()
        .and_then(|it| it.into_token())
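Note: with rowan 0.6, what used to be matched as `SyntaxElement::Node`/`SyntaxElement::Token` is now rowan's generic `NodeOrToken` enum, re-exported by `ra_syntax`, while `SyntaxElement` remains the element type returned by APIs such as `find_covering_element`. A small illustrative sketch of the pattern, with a made-up input snippet:

    use ra_syntax::{algo::find_covering_element, NodeOrToken, SourceFile, TextRange, TextUnit};

    fn main() {
        let parse = SourceFile::parse("fn main() { 92; }");
        let file = parse.tree();
        // The range covering just the literal "92".
        let range = TextRange::offset_len(TextUnit::from(12), TextUnit::from(2));
        // `find_covering_element` may land on either a node or a single token.
        match find_covering_element(file.syntax(), range) {
            NodeOrToken::Node(node) => println!("node {:?}", node.kind()),
            NodeOrToken::Token(token) => println!("token {:?}", token.kind()),
        }
    }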
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs
index 571d1c595..e60ae8cf6 100644
--- a/crates/ra_ide_api/src/folding_ranges.rs
+++ b/crates/ra_ide_api/src/folding_ranges.rs
@@ -2,7 +2,7 @@ use rustc_hash::FxHashSet;
 
 use ra_syntax::{
     ast::{self, AstNode, AstToken, VisibilityOwner},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, *},
     SyntaxNode, TextRange,
 };
@@ -31,8 +31,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(element.kind()) {
             let is_multiline = match &element {
-                SyntaxElement::Node(node) => node.text().contains_char('\n'),
-                SyntaxElement::Token(token) => token.text().contains('\n'),
+                NodeOrToken::Node(node) => node.text().contains_char('\n'),
+                NodeOrToken::Token(token) => token.text().contains('\n'),
             };
             if is_multiline {
                 res.push(Fold { range: element.text_range(), kind });
@@ -41,7 +41,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         }
 
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 // Fold groups of comments
                 if let Some(comment) = ast::Comment::cast(token) {
                     if !visited_comments.contains(&comment) {
@@ -53,7 +53,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
                    }
                }
            }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                // Fold groups of imports
                if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
                    if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
@@ -108,7 +108,7 @@ fn contiguous_range_for_group_unless(
    let mut last = first.clone();
    for element in first.siblings_with_tokens(Direction::Next) {
        let node = match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                if let Some(ws) = ast::Whitespace::cast(token) {
                    if !ws.spans_multiple_lines() {
                        // Ignore whitespace without blank lines
@@ -119,7 +119,7 @@ fn contiguous_range_for_group_unless(
                // group ends here
                break;
            }
-            SyntaxElement::Node(node) => node,
+            NodeOrToken::Node(node) => node,
        };
 
        // Stop if we find a node that doesn't belong to the group
@@ -154,7 +154,7 @@ fn contiguous_range_for_comment(
    let mut last = first.clone();
    for element in first.syntax().siblings_with_tokens(Direction::Next) {
        match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                if let Some(ws) = ast::Whitespace::cast(token.clone()) {
                    if !ws.spans_multiple_lines() {
                        // Ignore whitespace without blank lines
@@ -173,7 +173,7 @@ fn contiguous_range_for_comment(
                // * A comment of a different flavor was reached
                break;
            }
-            SyntaxElement::Node(_) => break,
+            NodeOrToken::Node(_) => break,
        };
    }
 
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs
index 007259d9e..72884e5ca 100644
--- a/crates/ra_ide_api/src/goto_type_definition.rs
+++ b/crates/ra_ide_api/src/goto_type_definition.rs
@@ -1,5 +1,5 @@
 use ra_db::SourceDatabase;
-use ra_syntax::{algo::find_token_at_offset, ast, AstNode};
+use ra_syntax::{ast, AstNode};
 
 use crate::{db::RootDatabase, FilePosition, NavigationTarget, RangeInfo};
 
@@ -9,7 +9,7 @@ pub(crate) fn goto_type_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
 
-    let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| {
+    let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| {
         token
             .parent()
             .ancestors()
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs
index 7f25f2108..a2e4b6f3c 100644
--- a/crates/ra_ide_api/src/join_lines.rs
+++ b/crates/ra_ide_api/src/join_lines.rs
@@ -3,7 +3,7 @@ use ra_fmt::{compute_ws, extract_trivial_expression};
 use ra_syntax::{
     algo::{find_covering_element, non_trivia_sibling},
     ast::{self, AstNode, AstToken},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, WHITESPACE},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
 };
@@ -23,8 +23,8 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
     };
 
     let node = match find_covering_element(file.syntax(), range) {
-        SyntaxElement::Node(node) => node,
-        SyntaxElement::Token(token) => token.parent(),
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
     };
     let mut edit = TextEditBuilder::default();
     for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs
index 1e2fac848..e802d01e4 100644
--- a/crates/ra_ide_api/src/matching_brace.rs
+++ b/crates/ra_ide_api/src/matching_brace.rs
@@ -1,9 +1,11 @@
-use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
+use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
 
 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
-    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs
index 76c50f6d6..a07e670fa 100644
--- a/crates/ra_ide_api/src/syntax_tree.rs
+++ b/crates/ra_ide_api/src/syntax_tree.rs
@@ -1,7 +1,7 @@
 use crate::db::RootDatabase;
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo, AstNode, SourceFile, SyntaxElement,
+    algo, AstNode, NodeOrToken, SourceFile,
     SyntaxKind::{RAW_STRING, STRING},
     SyntaxToken, TextRange,
 };
@@ -16,8 +16,8 @@ pub(crate) fn syntax_tree(
     let parse = db.parse(file_id);
     if let Some(text_range) = text_range {
         let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
-            SyntaxElement::Node(node) => node,
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => {
                 if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                     return tree;
                 }
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs
index 5a1cbcc49..6b3fd5904 100644
--- a/crates/ra_ide_api/src/typing.rs
+++ b/crates/ra_ide_api/src/typing.rs
@@ -1,11 +1,11 @@
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::find_node_at_offset,
     ast::{self, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
-    SyntaxToken, TextRange, TextUnit,
+    SyntaxToken, TextRange, TextUnit, TokenAtOffset,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
 
@@ -14,7 +14,9 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
-    let comment = find_token_at_offset(file.syntax(), position.offset)
+    let comment = file
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
 
@@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
 }
 
 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-    let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
+    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
        TokenAtOffset::Between(l, r) => {
            assert!(r == *token);
            l
@@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let parse = db.parse(position.file_id);
     assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.'));
 
-    let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset)
+    let whitespace = parse
+        .tree()
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Whitespace::cast)?;
 
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 7ff0fc472..8225759e7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -1,12 +1,13 @@
-use crate::subtree_source::SubtreeTokenSource;
-use crate::ExpandError;
 use ra_parser::{ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
+use crate::subtree_source::SubtreeTokenSource;
+use crate::ExpandError;
+
 /// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Default)]
 pub struct TokenMap {
@@ -200,7 +201,7 @@ fn convert_tt(
         }
 
         match child {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(doc_tokens) = convert_doc_comment(&token) {
                     token_trees.extend(doc_tokens);
                 } else if token.kind().is_trivia() {
@@ -210,7 +211,7 @@ fn convert_tt(
                 let char = token.text().chars().next().unwrap();
 
                 let spacing = match child_iter.peek() {
-                    Some(SyntaxElement::Token(token)) => {
+                    Some(NodeOrToken::Token(token)) => {
                         if token.kind().is_punct() {
                             tt::Spacing::Joint
                         } else {
@@ -241,7 +242,7 @@ fn convert_tt(
                    token_trees.push(child);
                }
            }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                let child = convert_tt(token_map, global_offset, &node)?.into();
                token_trees.push(child);
            }
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 9151b6ecd..192e9007d 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1,4 +1,4 @@
-use ra_syntax::{ast, AstNode};
+use ra_syntax::{ast, AstNode, NodeOrToken};
 
 use super::*;
 
@@ -118,11 +118,11 @@ pub fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
         match event {
             WalkEvent::Enter(element) => {
                 match element {
-                    ra_syntax::SyntaxElement::Node(node) => {
+                    NodeOrToken::Node(node) => {
                         indent!();
                         writeln!(buf, "{:?}", node.kind()).unwrap();
                     }
-                    ra_syntax::SyntaxElement::Token(token) => match token.kind() {
+                    NodeOrToken::Token(token) => match token.kind() {
                         ra_syntax::SyntaxKind::WHITESPACE => {}
                         _ => {
                             indent!();
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index a5565de33..97b6b047f 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 [dependencies]
 unicode-xid = "0.1.0"
 itertools = "0.8.0"
-rowan = "0.5.6"
+rowan = "0.6.0-pre.1"
 
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index f47e11e66..45f624810 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,18 +1,12 @@
 pub mod visit;
 
-use itertools::Itertools;
+use std::ops::RangeInclusive;
 
-use crate::{AstNode, Direction, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit};
-
-pub use rowan::TokenAtOffset;
-
-pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
-    match node.0.token_at_offset(offset) {
-        TokenAtOffset::None => TokenAtOffset::None,
-        TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
-        TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
-    }
-}
+use itertools::Itertools;
+
+use crate::{
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+};
 
 /// Returns ancestors of the node at the offset, sorted by length. This should
 /// do the right thing at an edge, e.g. when searching for expressions at `{
@@ -23,7 +17,7 @@ pub fn ancestors_at_offset(
     node: &SyntaxNode,
     offset: TextUnit,
 ) -> impl Iterator<Item = SyntaxNode> {
-    find_token_at_offset(node, offset)
+    node.token_at_offset(offset)
         .map(|token| token.parent().ancestors())
         .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
 }
@@ -44,20 +38,118 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
 /// Finds the first sibling in the given direction which is not `trivia`
 pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
     return match element {
-        SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
-        SyntaxElement::Token(token) => {
-            token.siblings_with_tokens(direction).skip(1).find(not_trivia)
-        }
+        NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+        NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
     };
 
     fn not_trivia(element: &SyntaxElement) -> bool {
         match element {
-            SyntaxElement::Node(_) => true,
-            SyntaxElement::Token(token) => !token.kind().is_trivia(),
+            NodeOrToken::Node(_) => true,
+            NodeOrToken::Token(token) => !token.kind().is_trivia(),
         }
     }
 }
 
 pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
-    SyntaxElement::new(root.0.covering_node(range))
+    root.covering_element(range)
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum InsertPosition<T> {
+    First,
+    Last,
+    Before(T),
+    After(T),
+}
+
+/// Adds specified children (tokens or nodes) to the current node at the
+/// specific position.
+///
+/// This is a type-unsafe low-level editing API, if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn insert_children(
+    parent: &SyntaxNode,
+    position: InsertPosition<SyntaxElement>,
+    to_insert: impl Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut delta = TextUnit::default();
+    let to_insert = to_insert.map(|element| {
+        delta += element.text_range().len();
+        to_green_element(element)
+    });
+
+    let old_children = parent.green().children();
+
+    let new_children = match &position {
+        InsertPosition::First => {
+            to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
+        }
+        InsertPosition::Last => old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>(),
+        InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
+            let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
+            let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
+            let (before, after) = old_children.split_at(split_at);
+            before
+                .iter()
+                .cloned()
+                .chain(to_insert)
+                .chain(after.iter().cloned())
+                .collect::<Box<[_]>>()
+        }
+    };
+
+    with_children(parent, new_children)
+}
+
+/// Replaces all nodes in `to_delete` with nodes from `to_insert`
+///
+/// This is a type-unsafe low-level editing API, if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn replace_children(
+    parent: &SyntaxNode,
+    to_delete: RangeInclusive<SyntaxElement>,
+    to_insert: impl Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let start = position_of_child(parent, to_delete.start().clone());
+    let end = position_of_child(parent, to_delete.end().clone());
+    let old_children = parent.green().children();
+
+    let new_children = old_children[..start]
+        .iter()
+        .cloned()
+        .chain(to_insert.map(to_green_element))
+        .chain(old_children[end + 1..].iter().cloned())
+        .collect::<Box<[_]>>();
+    with_children(parent, new_children)
+}
+
+fn with_children(
+    parent: &SyntaxNode,
+    new_children: Box<[NodeOrToken<rowan::GreenNode, rowan::GreenToken>]>,
+) -> SyntaxNode {
+    let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
+    let new_node =
+        rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children);
+    let new_root_node = parent.replace_with(new_node);
+    let new_root_node = SyntaxNode::new_root(new_root_node);
+
+    // FIXME: use a more elegant way to re-fetch the node (#1185), make
+    // `range` private afterwards
+    let mut ptr = SyntaxNodePtr::new(parent);
+    ptr.range = TextRange::offset_len(ptr.range().start(), len);
+    ptr.to_node(&new_root_node)
+}
+
+fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
+    parent
+        .children_with_tokens()
+        .position(|it| it == child)
+        .expect("element is not a child of current element")
+}
+
+fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
+    match element {
+        NodeOrToken::Node(it) => it.green().clone().into(),
+        NodeOrToken::Token(it) => it.green().clone().into(),
+    }
 }
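Note: the tree-editing helpers that used to be inherent methods on `SyntaxNode` now live in `ra_syntax::algo` as free functions over green children, matching the signatures added above. A rough usage sketch under those signatures; parsing two throwaway snippets and splicing one into the other is purely illustrative:

    use ra_syntax::{algo::insert_children, AstNode, InsertPosition, SourceFile};

    fn demo() {
        // Two independently parsed trees.
        let src = SourceFile::parse("fn a() {}").tree();
        let extra = SourceFile::parse("fn b() {}").tree();

        // `children_with_tokens()` yields `SyntaxElement`s, which is what
        // `insert_children` expects for `to_insert`.
        let to_insert = extra.syntax().children_with_tokens();

        // Returns a brand-new immutable tree; `src` itself is untouched.
        let new_root = insert_children(src.syntax(), InsertPosition::Last, to_insert);
        println!("{}", new_root.text());
    }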
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs
index 139bd3ec0..f9190d877 100644
--- a/crates/ra_syntax/src/ast/expr_extensions.rs
+++ b/crates/ra_syntax/src/ast/expr_extensions.rs
@@ -2,7 +2,7 @@
 
 use crate::{
     ast::{self, child_opt, children, AstChildren, AstNode},
-    SmolStr, SyntaxElement,
+    SmolStr,
     SyntaxKind::*,
     SyntaxToken, T,
 };
@@ -229,14 +229,11 @@ pub enum LiteralKind {
 
 impl ast::Literal {
     pub fn token(&self) -> SyntaxToken {
-        let elem = self
-            .syntax()
+        self.syntax()
             .children_with_tokens()
-            .find(|e| e.kind() != ATTR && !e.kind().is_trivia());
-        match elem {
-            Some(SyntaxElement::Token(token)) => token,
-            _ => unreachable!(),
-        }
+            .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+            .and_then(|e| e.into_token())
+            .unwrap()
     }
 
     pub fn kind(&self) -> LiteralKind {
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index 753fc42c6..d4873b39a 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -24,10 +24,7 @@ impl ast::NameRef {
 }
 
 fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
-    match node.0.green().children().first() {
-        Some(rowan::GreenElement::Token(it)) => it.text(),
-        _ => panic!(),
-    }
+    node.green().children().first().and_then(|it| it.as_token()).unwrap().text()
 }
 
 impl ast::Attr {
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 8af04c136..7f69b86e1 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -20,7 +20,6 @@ | |||
20 | //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md> | 20 | //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md> |
21 | 21 | ||
22 | mod syntax_node; | 22 | mod syntax_node; |
23 | mod syntax_text; | ||
24 | mod syntax_error; | 23 | mod syntax_error; |
25 | mod parsing; | 24 | mod parsing; |
26 | mod validation; | 25 | mod validation; |
@@ -38,19 +37,17 @@ use ra_text_edit::AtomTextEdit; | |||
38 | use crate::syntax_node::GreenNode; | 37 | use crate::syntax_node::GreenNode; |
39 | 38 | ||
40 | pub use crate::{ | 39 | pub use crate::{ |
40 | algo::InsertPosition, | ||
41 | ast::{AstNode, AstToken}, | 41 | ast::{AstNode, AstToken}, |
42 | parsing::{classify_literal, tokenize, Token}, | 42 | parsing::{classify_literal, tokenize, Token}, |
43 | ptr::{AstPtr, SyntaxNodePtr}, | 43 | ptr::{AstPtr, SyntaxNodePtr}, |
44 | syntax_error::{Location, SyntaxError, SyntaxErrorKind}, | 44 | syntax_error::{Location, SyntaxError, SyntaxErrorKind}, |
45 | syntax_node::{ | 45 | syntax_node::{ |
46 | Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, | 46 | Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, |
47 | WalkEvent, | ||
48 | }, | 47 | }, |
49 | syntax_text::SyntaxText, | ||
50 | }; | 48 | }; |
51 | pub use ra_parser::SyntaxKind; | 49 | pub use ra_parser::{SyntaxKind, T}; |
52 | pub use ra_parser::T; | 50 | pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit, TokenAtOffset, WalkEvent}; |
53 | pub use rowan::{SmolStr, TextRange, TextUnit}; | ||
54 | 51 | ||
55 | /// `Parse` is the result of the parsing: a syntax tree and a collection of | 52 | /// `Parse` is the result of the parsing: a syntax tree and a collection of |
56 | /// errors. | 53 | /// errors. |
@@ -76,7 +73,7 @@ impl<T> Parse<T> { | |||
76 | } | 73 | } |
77 | 74 | ||
78 | pub fn syntax_node(&self) -> SyntaxNode { | 75 | pub fn syntax_node(&self) -> SyntaxNode { |
79 | SyntaxNode::new(self.green.clone()) | 76 | SyntaxNode::new_root(self.green.clone()) |
80 | } | 77 | } |
81 | } | 78 | } |
82 | 79 | ||
@@ -146,18 +143,17 @@ impl Parse<SourceFile> { | |||
146 | pub use crate::ast::SourceFile; | 143 | pub use crate::ast::SourceFile; |
147 | 144 | ||
148 | impl SourceFile { | 145 | impl SourceFile { |
149 | fn new(green: GreenNode) -> SourceFile { | 146 | pub fn parse(text: &str) -> Parse<SourceFile> { |
150 | let root = SyntaxNode::new(green); | 147 | let (green, mut errors) = parsing::parse_text(text); |
148 | let root = SyntaxNode::new_root(green.clone()); | ||
149 | |||
151 | if cfg!(debug_assertions) { | 150 | if cfg!(debug_assertions) { |
152 | validation::validate_block_structure(&root); | 151 | validation::validate_block_structure(&root); |
153 | } | 152 | } |
154 | assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); | ||
155 | SourceFile::cast(root).unwrap() | ||
156 | } | ||
157 | 153 | ||
158 | pub fn parse(text: &str) -> Parse<SourceFile> { | 154 | errors.extend(validation::validate(&root)); |
159 | let (green, mut errors) = parsing::parse_text(text); | 155 | |
160 | errors.extend(validation::validate(&SourceFile::new(green.clone()))); | 156 | assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); |
161 | Parse { green, errors: Arc::new(errors), _ty: PhantomData } | 157 | Parse { green, errors: Arc::new(errors), _ty: PhantomData } |
162 | } | 158 | } |
163 | } | 159 | } |
@@ -267,8 +263,8 @@ fn api_walkthrough() { | |||
267 | match event { | 263 | match event { |
268 | WalkEvent::Enter(node) => { | 264 | WalkEvent::Enter(node) => { |
269 | let text = match &node { | 265 | let text = match &node { |
270 | SyntaxElement::Node(it) => it.text().to_string(), | 266 | NodeOrToken::Node(it) => it.text().to_string(), |
271 | SyntaxElement::Token(it) => it.text().to_string(), | 267 | NodeOrToken::Token(it) => it.text().to_string(), |
272 | }; | 268 | }; |
273 | buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); | 269 | buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); |
274 | indent += 2; | 270 | indent += 2; |
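The api_walkthrough hunk shows the renaming in miniature: SyntaxElement is now rowan's NodeOrToken, and WalkEvent is re-exported from rowan. The same indented tree dump as a free-standing sketch, assuming only the re-exports introduced in this file plus the SourceFile::parse entry point:

    use ra_syntax::{NodeOrToken, SourceFile, WalkEvent};

    // Sketch: print every node and token with indentation, mirroring api_walkthrough.
    fn dump_tree(text: &str) -> String {
        let root = SourceFile::parse(text).syntax_node();
        let mut buf = String::new();
        let mut indent = 0usize;
        for event in root.preorder_with_tokens() {
            match event {
                WalkEvent::Enter(element) => {
                    // Nodes expose their text as SyntaxText, tokens as a plain string.
                    let text = match &element {
                        NodeOrToken::Node(it) => it.text().to_string(),
                        NodeOrToken::Token(it) => it.text().to_string(),
                    };
                    buf += &format!("{:indent$}{:?} {:?}\n", " ", text, element.kind(), indent = indent);
                    indent += 2;
                }
                WalkEvent::Leave(_) => indent -= 2,
            }
        }
        buf
    }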
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 2f388bdfe..65b8aa10d 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -16,7 +16,7 @@ use crate::{ | |||
16 | text_token_source::TextTokenSource, | 16 | text_token_source::TextTokenSource, |
17 | text_tree_sink::TextTreeSink, | 17 | text_tree_sink::TextTreeSink, |
18 | }, | 18 | }, |
19 | syntax_node::{GreenNode, GreenToken, SyntaxElement, SyntaxNode}, | 19 | syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode}, |
20 | SyntaxError, | 20 | SyntaxError, |
21 | SyntaxKind::*, | 21 | SyntaxKind::*, |
22 | TextRange, TextUnit, T, | 22 | TextRange, TextUnit, T, |
@@ -70,7 +70,8 @@ fn reparse_token<'node>( | |||
70 | } | 70 | } |
71 | } | 71 | } |
72 | 72 | ||
73 | let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into()); | 73 | let new_token = |
74 | GreenToken::new(rowan::cursor::SyntaxKind(token.kind().into()), text.into()); | ||
74 | Some((token.replace_with(new_token), token.text_range())) | 75 | Some((token.replace_with(new_token), token.text_range())) |
75 | } | 76 | } |
76 | _ => None, | 77 | _ => None, |
@@ -98,8 +99,8 @@ fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { | |||
98 | let edit = | 99 | let edit = |
99 | AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); | 100 | AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); |
100 | let text = match element { | 101 | let text = match element { |
101 | SyntaxElement::Token(token) => token.text().to_string(), | 102 | NodeOrToken::Token(token) => token.text().to_string(), |
102 | SyntaxElement::Node(node) => node.text().to_string(), | 103 | NodeOrToken::Node(node) => node.text().to_string(), |
103 | }; | 104 | }; |
104 | edit.apply(text) | 105 | edit.apply(text) |
105 | } | 106 | } |
@@ -114,8 +115,8 @@ fn is_contextual_kw(text: &str) -> bool { | |||
114 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { | 115 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { |
115 | let node = algo::find_covering_element(node, range); | 116 | let node = algo::find_covering_element(node, range); |
116 | let mut ancestors = match node { | 117 | let mut ancestors = match node { |
117 | SyntaxElement::Token(it) => it.parent().ancestors(), | 118 | NodeOrToken::Token(it) => it.parent().ancestors(), |
118 | SyntaxElement::Node(it) => it.ancestors(), | 119 | NodeOrToken::Node(it) => it.ancestors(), |
119 | }; | 120 | }; |
120 | ancestors.find_map(|node| { | 121 | ancestors.find_map(|node| { |
121 | let first_child = node.first_child_or_token().map(|it| it.kind()); | 122 | let first_child = node.first_child_or_token().map(|it| it.kind()); |
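find_reparsable_node uses the same NodeOrToken destructuring to climb from whatever element covers the edited range to its ancestor nodes. Pulled out as a standalone sketch (the function name is illustrative; the two match arms are taken from the hunk above):

    use ra_syntax::{NodeOrToken, SyntaxElement, SyntaxNode};

    // Sketch: iterate the ancestor nodes of either a node or a token.
    fn ancestor_nodes(element: SyntaxElement) -> impl Iterator<Item = SyntaxNode> {
        match element {
            // A token has no ancestors() of its own; start the walk from its parent node.
            NodeOrToken::Token(it) => it.parent().ancestors(),
            NodeOrToken::Node(it) => it.ancestors(),
        }
    }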
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index c42045d77..b2f5b8c64 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs | |||
@@ -6,487 +6,37 @@ | |||
6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate, this | 6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate, this |
7 | //! modules just wraps its API. | 7 | //! modules just wraps its API. |
8 | 8 | ||
9 | use std::{fmt, iter::successors, ops::RangeInclusive}; | ||
10 | |||
11 | use ra_parser::ParseError; | 9 | use ra_parser::ParseError; |
12 | use rowan::GreenNodeBuilder; | 10 | use rowan::{GreenNodeBuilder, Language}; |
13 | 11 | ||
14 | use crate::{ | 12 | use crate::{ |
15 | syntax_error::{SyntaxError, SyntaxErrorKind}, | 13 | syntax_error::{SyntaxError, SyntaxErrorKind}, |
16 | AstNode, Parse, SmolStr, SourceFile, SyntaxKind, SyntaxNodePtr, SyntaxText, TextRange, | 14 | Parse, SmolStr, SyntaxKind, TextUnit, |
17 | TextUnit, | ||
18 | }; | 15 | }; |
19 | 16 | ||
20 | pub use rowan::WalkEvent; | ||
21 | pub(crate) use rowan::{GreenNode, GreenToken}; | 17 | pub(crate) use rowan::{GreenNode, GreenToken}; |
22 | 18 | ||
23 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | 19 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] |
24 | pub enum InsertPosition<T> { | 20 | pub enum RustLanguage {} |
25 | First, | 21 | impl Language for RustLanguage { |
26 | Last, | 22 | type Kind = SyntaxKind; |
27 | Before(T), | ||
28 | After(T), | ||
29 | } | ||
30 | |||
31 | #[derive(PartialEq, Eq, Hash, Clone)] | ||
32 | pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode); | ||
33 | 23 | ||
34 | impl fmt::Debug for SyntaxNode { | 24 | fn kind_from_raw(raw: rowan::cursor::SyntaxKind) -> SyntaxKind { |
35 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | 25 | SyntaxKind::from(raw.0) |
36 | if f.alternate() { | ||
37 | let mut level = 0; | ||
38 | for event in self.preorder_with_tokens() { | ||
39 | match event { | ||
40 | WalkEvent::Enter(element) => { | ||
41 | for _ in 0..level { | ||
42 | write!(f, " ")?; | ||
43 | } | ||
44 | match element { | ||
45 | SyntaxElement::Node(node) => writeln!(f, "{:?}", node)?, | ||
46 | SyntaxElement::Token(token) => writeln!(f, "{:?}", token)?, | ||
47 | } | ||
48 | level += 1; | ||
49 | } | ||
50 | WalkEvent::Leave(_) => level -= 1, | ||
51 | } | ||
52 | } | ||
53 | assert_eq!(level, 0); | ||
54 | Ok(()) | ||
55 | } else { | ||
56 | write!(f, "{:?}@{:?}", self.kind(), self.text_range()) | ||
57 | } | ||
58 | } | 26 | } |
59 | } | ||
60 | 27 | ||
61 | impl fmt::Display for SyntaxNode { | 28 | fn kind_to_raw(kind: SyntaxKind) -> rowan::cursor::SyntaxKind { |
62 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 29 | rowan::cursor::SyntaxKind(kind.into()) |
63 | fmt::Display::fmt(&self.text(), fmt) | ||
64 | } | 30 | } |
65 | } | 31 | } |
66 | 32 | ||
67 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | 33 | pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>; |
68 | pub enum Direction { | 34 | pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>; |
69 | Next, | 35 | pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>; |
70 | Prev, | 36 | pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>; |
71 | } | 37 | pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>; |
72 | |||
73 | impl SyntaxNode { | ||
74 | pub(crate) fn new(green: GreenNode) -> SyntaxNode { | ||
75 | let inner = rowan::cursor::SyntaxNode::new_root(green); | ||
76 | SyntaxNode(inner) | ||
77 | } | ||
78 | |||
79 | pub fn kind(&self) -> SyntaxKind { | ||
80 | self.0.kind().0.into() | ||
81 | } | ||
82 | |||
83 | pub fn text_range(&self) -> TextRange { | ||
84 | self.0.text_range() | ||
85 | } | ||
86 | |||
87 | pub fn text(&self) -> SyntaxText { | ||
88 | SyntaxText::new(self.clone()) | ||
89 | } | ||
90 | |||
91 | pub fn parent(&self) -> Option<SyntaxNode> { | ||
92 | self.0.parent().map(SyntaxNode) | ||
93 | } | ||
94 | |||
95 | pub fn first_child(&self) -> Option<SyntaxNode> { | ||
96 | self.0.first_child().map(SyntaxNode) | ||
97 | } | ||
98 | |||
99 | pub fn first_child_or_token(&self) -> Option<SyntaxElement> { | ||
100 | self.0.first_child_or_token().map(SyntaxElement::new) | ||
101 | } | ||
102 | |||
103 | pub fn last_child(&self) -> Option<SyntaxNode> { | ||
104 | self.0.last_child().map(SyntaxNode) | ||
105 | } | ||
106 | |||
107 | pub fn last_child_or_token(&self) -> Option<SyntaxElement> { | ||
108 | self.0.last_child_or_token().map(SyntaxElement::new) | ||
109 | } | ||
110 | |||
111 | pub fn next_sibling(&self) -> Option<SyntaxNode> { | ||
112 | self.0.next_sibling().map(SyntaxNode) | ||
113 | } | ||
114 | |||
115 | pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
116 | self.0.next_sibling_or_token().map(SyntaxElement::new) | ||
117 | } | ||
118 | |||
119 | pub fn prev_sibling(&self) -> Option<SyntaxNode> { | ||
120 | self.0.prev_sibling().map(SyntaxNode) | ||
121 | } | ||
122 | |||
123 | pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
124 | self.0.prev_sibling_or_token().map(SyntaxElement::new) | ||
125 | } | ||
126 | |||
127 | pub fn children(&self) -> SyntaxNodeChildren { | ||
128 | SyntaxNodeChildren(self.0.children()) | ||
129 | } | ||
130 | |||
131 | pub fn children_with_tokens(&self) -> SyntaxElementChildren { | ||
132 | SyntaxElementChildren(self.0.children_with_tokens()) | ||
133 | } | ||
134 | |||
135 | pub fn first_token(&self) -> Option<SyntaxToken> { | ||
136 | self.0.first_token().map(SyntaxToken) | ||
137 | } | ||
138 | |||
139 | pub fn last_token(&self) -> Option<SyntaxToken> { | ||
140 | self.0.last_token().map(SyntaxToken) | ||
141 | } | ||
142 | 38 | ||
143 | pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> { | 39 | pub use rowan::{Direction, NodeOrToken}; |
144 | successors(Some(self.clone()), |node| node.parent()) | ||
145 | } | ||
146 | |||
147 | pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> { | ||
148 | self.preorder().filter_map(|event| match event { | ||
149 | WalkEvent::Enter(node) => Some(node), | ||
150 | WalkEvent::Leave(_) => None, | ||
151 | }) | ||
152 | } | ||
153 | |||
154 | pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> { | ||
155 | self.preorder_with_tokens().filter_map(|event| match event { | ||
156 | WalkEvent::Enter(it) => Some(it), | ||
157 | WalkEvent::Leave(_) => None, | ||
158 | }) | ||
159 | } | ||
160 | |||
161 | pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> { | ||
162 | successors(Some(self.clone()), move |node| match direction { | ||
163 | Direction::Next => node.next_sibling(), | ||
164 | Direction::Prev => node.prev_sibling(), | ||
165 | }) | ||
166 | } | ||
167 | |||
168 | pub fn siblings_with_tokens( | ||
169 | &self, | ||
170 | direction: Direction, | ||
171 | ) -> impl Iterator<Item = SyntaxElement> { | ||
172 | let me: SyntaxElement = self.clone().into(); | ||
173 | successors(Some(me), move |el| match direction { | ||
174 | Direction::Next => el.next_sibling_or_token(), | ||
175 | Direction::Prev => el.prev_sibling_or_token(), | ||
176 | }) | ||
177 | } | ||
178 | |||
179 | pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<SyntaxNode>> { | ||
180 | self.0.preorder().map(|event| match event { | ||
181 | WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)), | ||
182 | WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)), | ||
183 | }) | ||
184 | } | ||
185 | |||
186 | pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> { | ||
187 | self.0.preorder_with_tokens().map(|event| match event { | ||
188 | WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)), | ||
189 | WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)), | ||
190 | }) | ||
191 | } | ||
192 | |||
193 | pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode { | ||
194 | self.0.replace_with(replacement) | ||
195 | } | ||
196 | |||
197 | /// Adds specified children (tokens or nodes) to the current node at the | ||
198 | /// specific position. | ||
199 | /// | ||
200 | /// This is a type-unsafe low-level editing API, if you need to use it, | ||
201 | /// prefer to create a type-safe abstraction on top of it instead. | ||
202 | pub fn insert_children( | ||
203 | &self, | ||
204 | position: InsertPosition<SyntaxElement>, | ||
205 | to_insert: impl Iterator<Item = SyntaxElement>, | ||
206 | ) -> SyntaxNode { | ||
207 | let mut delta = TextUnit::default(); | ||
208 | let to_insert = to_insert.map(|element| { | ||
209 | delta += element.text_len(); | ||
210 | to_green_element(element) | ||
211 | }); | ||
212 | |||
213 | let old_children = self.0.green().children(); | ||
214 | |||
215 | let new_children = match &position { | ||
216 | InsertPosition::First => { | ||
217 | to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>() | ||
218 | } | ||
219 | InsertPosition::Last => { | ||
220 | old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>() | ||
221 | } | ||
222 | InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { | ||
223 | let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; | ||
224 | let split_at = self.position_of_child(anchor.clone()) + take_anchor; | ||
225 | let (before, after) = old_children.split_at(split_at); | ||
226 | before | ||
227 | .iter() | ||
228 | .cloned() | ||
229 | .chain(to_insert) | ||
230 | .chain(after.iter().cloned()) | ||
231 | .collect::<Box<[_]>>() | ||
232 | } | ||
233 | }; | ||
234 | |||
235 | self.with_children(new_children) | ||
236 | } | ||
237 | |||
238 | /// Replaces all nodes in `to_delete` with nodes from `to_insert` | ||
239 | /// | ||
240 | /// This is a type-unsafe low-level editing API, if you need to use it, | ||
241 | /// prefer to create a type-safe abstraction on top of it instead. | ||
242 | pub fn replace_children( | ||
243 | &self, | ||
244 | to_delete: RangeInclusive<SyntaxElement>, | ||
245 | to_insert: impl Iterator<Item = SyntaxElement>, | ||
246 | ) -> SyntaxNode { | ||
247 | let start = self.position_of_child(to_delete.start().clone()); | ||
248 | let end = self.position_of_child(to_delete.end().clone()); | ||
249 | let old_children = self.0.green().children(); | ||
250 | |||
251 | let new_children = old_children[..start] | ||
252 | .iter() | ||
253 | .cloned() | ||
254 | .chain(to_insert.map(to_green_element)) | ||
255 | .chain(old_children[end + 1..].iter().cloned()) | ||
256 | .collect::<Box<[_]>>(); | ||
257 | self.with_children(new_children) | ||
258 | } | ||
259 | |||
260 | fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode { | ||
261 | let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>(); | ||
262 | let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children); | ||
263 | let new_file_node = self.replace_with(new_node); | ||
264 | let file = SourceFile::new(new_file_node); | ||
265 | |||
266 | // FIXME: use a more elegant way to re-fetch the node (#1185), make | ||
267 | // `range` private afterwards | ||
268 | let mut ptr = SyntaxNodePtr::new(self); | ||
269 | ptr.range = TextRange::offset_len(ptr.range().start(), len); | ||
270 | ptr.to_node(file.syntax()).to_owned() | ||
271 | } | ||
272 | |||
273 | fn position_of_child(&self, child: SyntaxElement) -> usize { | ||
274 | self.children_with_tokens() | ||
275 | .position(|it| it == child) | ||
276 | .expect("element is not a child of current element") | ||
277 | } | ||
278 | } | ||
279 | |||
280 | fn to_green_element(element: SyntaxElement) -> rowan::GreenElement { | ||
281 | match element { | ||
282 | SyntaxElement::Node(node) => node.0.green().clone().into(), | ||
283 | SyntaxElement::Token(tok) => { | ||
284 | GreenToken::new(rowan::SyntaxKind(tok.kind() as u16), tok.text().clone()).into() | ||
285 | } | ||
286 | } | ||
287 | } | ||
288 | |||
289 | #[derive(Clone, PartialEq, Eq, Hash)] | ||
290 | pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken); | ||
291 | |||
292 | impl fmt::Debug for SyntaxToken { | ||
293 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
294 | write!(fmt, "{:?}@{:?}", self.kind(), self.text_range())?; | ||
295 | if self.text().len() < 25 { | ||
296 | return write!(fmt, " {:?}", self.text()); | ||
297 | } | ||
298 | let text = self.text().as_str(); | ||
299 | for idx in 21..25 { | ||
300 | if text.is_char_boundary(idx) { | ||
301 | let text = format!("{} ...", &text[..idx]); | ||
302 | return write!(fmt, " {:?}", text); | ||
303 | } | ||
304 | } | ||
305 | unreachable!() | ||
306 | } | ||
307 | } | ||
308 | |||
309 | impl fmt::Display for SyntaxToken { | ||
310 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
311 | fmt::Display::fmt(self.text(), fmt) | ||
312 | } | ||
313 | } | ||
314 | |||
315 | impl SyntaxToken { | ||
316 | pub fn kind(&self) -> SyntaxKind { | ||
317 | self.0.kind().0.into() | ||
318 | } | ||
319 | |||
320 | pub fn text(&self) -> &SmolStr { | ||
321 | self.0.text() | ||
322 | } | ||
323 | |||
324 | pub fn text_range(&self) -> TextRange { | ||
325 | self.0.text_range() | ||
326 | } | ||
327 | |||
328 | pub fn parent(&self) -> SyntaxNode { | ||
329 | SyntaxNode(self.0.parent()) | ||
330 | } | ||
331 | |||
332 | pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
333 | self.0.next_sibling_or_token().map(SyntaxElement::new) | ||
334 | } | ||
335 | |||
336 | pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
337 | self.0.prev_sibling_or_token().map(SyntaxElement::new) | ||
338 | } | ||
339 | |||
340 | pub fn siblings_with_tokens( | ||
341 | &self, | ||
342 | direction: Direction, | ||
343 | ) -> impl Iterator<Item = SyntaxElement> { | ||
344 | let me: SyntaxElement = self.clone().into(); | ||
345 | successors(Some(me), move |el| match direction { | ||
346 | Direction::Next => el.next_sibling_or_token(), | ||
347 | Direction::Prev => el.prev_sibling_or_token(), | ||
348 | }) | ||
349 | } | ||
350 | |||
351 | pub fn next_token(&self) -> Option<SyntaxToken> { | ||
352 | self.0.next_token().map(SyntaxToken) | ||
353 | } | ||
354 | |||
355 | pub fn prev_token(&self) -> Option<SyntaxToken> { | ||
356 | self.0.prev_token().map(SyntaxToken) | ||
357 | } | ||
358 | |||
359 | pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode { | ||
360 | self.0.replace_with(new_token) | ||
361 | } | ||
362 | } | ||
363 | |||
364 | #[derive(Debug, PartialEq, Eq, Hash, Clone)] | ||
365 | pub enum SyntaxElement { | ||
366 | Node(SyntaxNode), | ||
367 | Token(SyntaxToken), | ||
368 | } | ||
369 | |||
370 | impl From<SyntaxNode> for SyntaxElement { | ||
371 | fn from(node: SyntaxNode) -> Self { | ||
372 | SyntaxElement::Node(node) | ||
373 | } | ||
374 | } | ||
375 | |||
376 | impl From<SyntaxToken> for SyntaxElement { | ||
377 | fn from(token: SyntaxToken) -> Self { | ||
378 | SyntaxElement::Token(token) | ||
379 | } | ||
380 | } | ||
381 | |||
382 | impl fmt::Display for SyntaxElement { | ||
383 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
384 | match self { | ||
385 | SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt), | ||
386 | SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt), | ||
387 | } | ||
388 | } | ||
389 | } | ||
390 | |||
391 | impl SyntaxElement { | ||
392 | pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self { | ||
393 | match el { | ||
394 | rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)), | ||
395 | rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)), | ||
396 | } | ||
397 | } | ||
398 | |||
399 | pub fn kind(&self) -> SyntaxKind { | ||
400 | match self { | ||
401 | SyntaxElement::Node(it) => it.kind(), | ||
402 | SyntaxElement::Token(it) => it.kind(), | ||
403 | } | ||
404 | } | ||
405 | |||
406 | pub fn as_node(&self) -> Option<&SyntaxNode> { | ||
407 | match self { | ||
408 | SyntaxElement::Node(node) => Some(node), | ||
409 | SyntaxElement::Token(_) => None, | ||
410 | } | ||
411 | } | ||
412 | |||
413 | pub fn into_node(self) -> Option<SyntaxNode> { | ||
414 | match self { | ||
415 | SyntaxElement::Node(node) => Some(node), | ||
416 | SyntaxElement::Token(_) => None, | ||
417 | } | ||
418 | } | ||
419 | |||
420 | pub fn as_token(&self) -> Option<&SyntaxToken> { | ||
421 | match self { | ||
422 | SyntaxElement::Node(_) => None, | ||
423 | SyntaxElement::Token(token) => Some(token), | ||
424 | } | ||
425 | } | ||
426 | |||
427 | pub fn into_token(self) -> Option<SyntaxToken> { | ||
428 | match self { | ||
429 | SyntaxElement::Node(_) => None, | ||
430 | SyntaxElement::Token(token) => Some(token), | ||
431 | } | ||
432 | } | ||
433 | |||
434 | pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
435 | match self { | ||
436 | SyntaxElement::Node(it) => it.next_sibling_or_token(), | ||
437 | SyntaxElement::Token(it) => it.next_sibling_or_token(), | ||
438 | } | ||
439 | } | ||
440 | |||
441 | pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> { | ||
442 | match self { | ||
443 | SyntaxElement::Node(it) => it.prev_sibling_or_token(), | ||
444 | SyntaxElement::Token(it) => it.prev_sibling_or_token(), | ||
445 | } | ||
446 | } | ||
447 | |||
448 | pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> { | ||
449 | match self { | ||
450 | SyntaxElement::Node(it) => it.clone(), | ||
451 | SyntaxElement::Token(it) => it.parent(), | ||
452 | } | ||
453 | .ancestors() | ||
454 | } | ||
455 | |||
456 | pub fn text_range(&self) -> TextRange { | ||
457 | match self { | ||
458 | SyntaxElement::Node(it) => it.text_range(), | ||
459 | SyntaxElement::Token(it) => it.text_range(), | ||
460 | } | ||
461 | } | ||
462 | |||
463 | fn text_len(&self) -> TextUnit { | ||
464 | match self { | ||
465 | SyntaxElement::Node(node) => node.0.green().text_len(), | ||
466 | SyntaxElement::Token(token) => TextUnit::of_str(token.0.text()), | ||
467 | } | ||
468 | } | ||
469 | } | ||
470 | |||
471 | #[derive(Clone, Debug)] | ||
472 | pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren); | ||
473 | |||
474 | impl Iterator for SyntaxNodeChildren { | ||
475 | type Item = SyntaxNode; | ||
476 | fn next(&mut self) -> Option<SyntaxNode> { | ||
477 | self.0.next().map(SyntaxNode) | ||
478 | } | ||
479 | } | ||
480 | |||
481 | #[derive(Clone, Debug)] | ||
482 | pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren); | ||
483 | |||
484 | impl Iterator for SyntaxElementChildren { | ||
485 | type Item = SyntaxElement; | ||
486 | fn next(&mut self) -> Option<SyntaxElement> { | ||
487 | self.0.next().map(SyntaxElement::new) | ||
488 | } | ||
489 | } | ||
490 | 40 | ||
491 | pub struct SyntaxTreeBuilder { | 41 | pub struct SyntaxTreeBuilder { |
492 | errors: Vec<SyntaxError>, | 42 | errors: Vec<SyntaxError>, |
@@ -507,19 +57,21 @@ impl SyntaxTreeBuilder { | |||
507 | 57 | ||
508 | pub fn finish(self) -> Parse<SyntaxNode> { | 58 | pub fn finish(self) -> Parse<SyntaxNode> { |
509 | let (green, errors) = self.finish_raw(); | 59 | let (green, errors) = self.finish_raw(); |
510 | let node = SyntaxNode::new(green); | 60 | let node = SyntaxNode::new_root(green); |
511 | if cfg!(debug_assertions) { | 61 | if cfg!(debug_assertions) { |
512 | crate::validation::validate_block_structure(&node); | 62 | crate::validation::validate_block_structure(&node); |
513 | } | 63 | } |
514 | Parse::new(node.0.green().clone(), errors) | 64 | Parse::new(node.green().clone(), errors) |
515 | } | 65 | } |
516 | 66 | ||
517 | pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { | 67 | pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { |
518 | self.inner.token(rowan::SyntaxKind(kind.into()), text) | 68 | let kind = RustLanguage::kind_to_raw(kind); |
69 | self.inner.token(kind, text) | ||
519 | } | 70 | } |
520 | 71 | ||
521 | pub fn start_node(&mut self, kind: SyntaxKind) { | 72 | pub fn start_node(&mut self, kind: SyntaxKind) { |
522 | self.inner.start_node(rowan::SyntaxKind(kind.into())) | 73 | let kind = RustLanguage::kind_to_raw(kind); |
74 | self.inner.start_node(kind) | ||
523 | } | 75 | } |
524 | 76 | ||
525 | pub fn finish_node(&mut self) { | 77 | pub fn finish_node(&mut self) { |
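With the hand-rolled SyntaxNode/SyntaxToken wrappers gone, the RustLanguage impl is the only bridge left between ra_parser's SyntaxKind and rowan's raw kinds, and SyntaxTreeBuilder routes every kind through it. A sketch of driving the builder by hand; SyntaxTreeBuilder::default() as the constructor and the whitespace-only tree are assumptions made for illustration:

    use ra_syntax::{SyntaxKind, SyntaxNode, SyntaxTreeBuilder};

    fn whitespace_only_file() -> SyntaxNode {
        // Assumption: default() is the builder's constructor (not shown in this hunk).
        let mut builder = SyntaxTreeBuilder::default();
        // start_node/token convert SyntaxKind via RustLanguage::kind_to_raw internally.
        builder.start_node(SyntaxKind::SOURCE_FILE);
        builder.token(SyntaxKind::WHITESPACE, "\n".into());
        builder.finish_node();
        // finish() yields Parse<SyntaxNode>; syntax_node() rebuilds the root with new_root.
        builder.finish().syntax_node()
    }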
diff --git a/crates/ra_syntax/src/syntax_text.rs b/crates/ra_syntax/src/syntax_text.rs deleted file mode 100644 index 652cb7a1e..000000000 --- a/crates/ra_syntax/src/syntax_text.rs +++ /dev/null | |||
@@ -1,178 +0,0 @@ | |||
1 | use std::{ | ||
2 | fmt, | ||
3 | ops::{self, Bound}, | ||
4 | }; | ||
5 | |||
6 | use crate::{SmolStr, SyntaxElement, SyntaxNode, TextRange, TextUnit}; | ||
7 | |||
8 | #[derive(Clone)] | ||
9 | pub struct SyntaxText { | ||
10 | node: SyntaxNode, | ||
11 | range: TextRange, | ||
12 | } | ||
13 | |||
14 | impl SyntaxText { | ||
15 | pub(crate) fn new(node: SyntaxNode) -> SyntaxText { | ||
16 | let range = node.text_range(); | ||
17 | SyntaxText { node, range } | ||
18 | } | ||
19 | |||
20 | pub fn try_fold_chunks<T, F, E>(&self, init: T, mut f: F) -> Result<T, E> | ||
21 | where | ||
22 | F: FnMut(T, &str) -> Result<T, E>, | ||
23 | { | ||
24 | self.node.descendants_with_tokens().try_fold(init, move |acc, element| { | ||
25 | let res = match element { | ||
26 | SyntaxElement::Token(token) => { | ||
27 | let range = match self.range.intersection(&token.text_range()) { | ||
28 | None => return Ok(acc), | ||
29 | Some(it) => it, | ||
30 | }; | ||
31 | let slice = if range == token.text_range() { | ||
32 | token.text() | ||
33 | } else { | ||
34 | let range = range - token.text_range().start(); | ||
35 | &token.text()[range] | ||
36 | }; | ||
37 | f(acc, slice)? | ||
38 | } | ||
39 | SyntaxElement::Node(_) => acc, | ||
40 | }; | ||
41 | Ok(res) | ||
42 | }) | ||
43 | } | ||
44 | |||
45 | pub fn try_for_each_chunk<F: FnMut(&str) -> Result<(), E>, E>( | ||
46 | &self, | ||
47 | mut f: F, | ||
48 | ) -> Result<(), E> { | ||
49 | self.try_fold_chunks((), move |(), chunk| f(chunk)) | ||
50 | } | ||
51 | |||
52 | pub fn for_each_chunk<F: FnMut(&str)>(&self, mut f: F) { | ||
53 | enum Void {} | ||
54 | match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) { | ||
55 | Ok(()) => (), | ||
56 | Err(void) => match void {}, | ||
57 | } | ||
58 | } | ||
59 | |||
60 | pub fn to_smol_string(&self) -> SmolStr { | ||
61 | self.to_string().into() | ||
62 | } | ||
63 | |||
64 | pub fn contains_char(&self, c: char) -> bool { | ||
65 | self.try_for_each_chunk(|chunk| if chunk.contains(c) { Err(()) } else { Ok(()) }).is_err() | ||
66 | } | ||
67 | |||
68 | pub fn find_char(&self, c: char) -> Option<TextUnit> { | ||
69 | let mut acc: TextUnit = 0.into(); | ||
70 | let res = self.try_for_each_chunk(|chunk| { | ||
71 | if let Some(pos) = chunk.find(c) { | ||
72 | let pos: TextUnit = (pos as u32).into(); | ||
73 | return Err(acc + pos); | ||
74 | } | ||
75 | acc += TextUnit::of_str(chunk); | ||
76 | Ok(()) | ||
77 | }); | ||
78 | found(res) | ||
79 | } | ||
80 | |||
81 | pub fn len(&self) -> TextUnit { | ||
82 | self.range.len() | ||
83 | } | ||
84 | |||
85 | pub fn is_empty(&self) -> bool { | ||
86 | self.range.is_empty() | ||
87 | } | ||
88 | |||
89 | pub fn slice(&self, range: impl ops::RangeBounds<TextUnit>) -> SyntaxText { | ||
90 | let start = match range.start_bound() { | ||
91 | Bound::Included(&b) => b, | ||
92 | Bound::Excluded(_) => panic!("utf-aware slicing can't work this way"), | ||
93 | Bound::Unbounded => 0.into(), | ||
94 | }; | ||
95 | let end = match range.end_bound() { | ||
96 | Bound::Included(_) => panic!("utf-aware slicing can't work this way"), | ||
97 | Bound::Excluded(&b) => b, | ||
98 | Bound::Unbounded => self.len(), | ||
99 | }; | ||
100 | assert!(start <= end); | ||
101 | let len = end - start; | ||
102 | let start = self.range.start() + start; | ||
103 | let end = start + len; | ||
104 | assert!( | ||
105 | start <= end, | ||
106 | "invalid slice, range: {:?}, slice: {:?}", | ||
107 | self.range, | ||
108 | (range.start_bound(), range.end_bound()), | ||
109 | ); | ||
110 | let range = TextRange::from_to(start, end); | ||
111 | assert!( | ||
112 | range.is_subrange(&self.range), | ||
113 | "invalid slice, range: {:?}, slice: {:?}", | ||
114 | self.range, | ||
115 | range, | ||
116 | ); | ||
117 | SyntaxText { node: self.node.clone(), range } | ||
118 | } | ||
119 | |||
120 | pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> { | ||
121 | let offset = offset.into(); | ||
122 | let mut start: TextUnit = 0.into(); | ||
123 | let res = self.try_for_each_chunk(|chunk| { | ||
124 | let end = start + TextUnit::of_str(chunk); | ||
125 | if start <= offset && offset < end { | ||
126 | let off: usize = u32::from(offset - start) as usize; | ||
127 | return Err(chunk[off..].chars().next().unwrap()); | ||
128 | } | ||
129 | start = end; | ||
130 | Ok(()) | ||
131 | }); | ||
132 | found(res) | ||
133 | } | ||
134 | } | ||
135 | |||
136 | fn found<T>(res: Result<(), T>) -> Option<T> { | ||
137 | match res { | ||
138 | Ok(()) => None, | ||
139 | Err(it) => Some(it), | ||
140 | } | ||
141 | } | ||
142 | |||
143 | impl fmt::Debug for SyntaxText { | ||
144 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
145 | fmt::Debug::fmt(&self.to_string(), f) | ||
146 | } | ||
147 | } | ||
148 | |||
149 | impl fmt::Display for SyntaxText { | ||
150 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
151 | self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f)) | ||
152 | } | ||
153 | } | ||
154 | |||
155 | impl From<SyntaxText> for String { | ||
156 | fn from(text: SyntaxText) -> String { | ||
157 | text.to_string() | ||
158 | } | ||
159 | } | ||
160 | |||
161 | impl PartialEq<str> for SyntaxText { | ||
162 | fn eq(&self, mut rhs: &str) -> bool { | ||
163 | self.try_for_each_chunk(|chunk| { | ||
164 | if !rhs.starts_with(chunk) { | ||
165 | return Err(()); | ||
166 | } | ||
167 | rhs = &rhs[chunk.len()..]; | ||
168 | Ok(()) | ||
169 | }) | ||
170 | .is_ok() | ||
171 | } | ||
172 | } | ||
173 | |||
174 | impl PartialEq<&'_ str> for SyntaxText { | ||
175 | fn eq(&self, rhs: &&str) -> bool { | ||
176 | self == *rhs | ||
177 | } | ||
178 | } | ||
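syntax_text.rs disappears because SyntaxText was upstreamed into rowan and is now re-exported from lib.rs (see above). A caller-side sketch of the chunk-based API; that contains_char kept its name across the move is an assumption based on the deleted implementation:

    use ra_syntax::SourceFile;

    // Sketch: check for a character in the tree's text without allocating a String;
    // SyntaxText walks the underlying token chunks lazily.
    fn source_contains_char(text: &str, c: char) -> bool {
        let root = SourceFile::parse(text).syntax_node();
        root.text().contains_char(c) // assumed to exist on rowan::SyntaxText
    }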
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 19bdafef2..e03c02d1b 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -5,16 +5,16 @@ mod field_expr; | |||
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | algo::visit::{visitor_ctx, VisitorCtx}, | 7 | algo::visit::{visitor_ctx, VisitorCtx}, |
8 | ast, AstNode, SourceFile, SyntaxError, | 8 | ast, SyntaxError, |
9 | SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING}, | 9 | SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING}, |
10 | SyntaxNode, TextUnit, T, | 10 | SyntaxNode, TextUnit, T, |
11 | }; | 11 | }; |
12 | 12 | ||
13 | pub(crate) use unescape::EscapeError; | 13 | pub(crate) use unescape::EscapeError; |
14 | 14 | ||
15 | pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { | 15 | pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> { |
16 | let mut errors = Vec::new(); | 16 | let mut errors = Vec::new(); |
17 | for node in file.syntax().descendants() { | 17 | for node in root.descendants() { |
18 | let _ = visitor_ctx(&mut errors) | 18 | let _ = visitor_ctx(&mut errors) |
19 | .visit::<ast::Literal, _>(validate_literal) | 19 | .visit::<ast::Literal, _>(validate_literal) |
20 | .visit::<ast::Block, _>(block::validate_block_node) | 20 | .visit::<ast::Block, _>(block::validate_block_node) |
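validate() now walks a plain SyntaxNode, so SourceFile::parse can run it on the freshly built root (see the lib.rs hunk) and stash the results in Parse. A sketch of how that surfaces to callers; the errors() accessor name is an assumption, since only the errors field of Parse is visible in this diff:

    use ra_syntax::SourceFile;

    fn has_syntax_errors(text: &str) -> bool {
        let parse = SourceFile::parse(text);
        // Assumption: Parse exposes its collected SyntaxErrors via errors().
        !parse.errors().is_empty()
    }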