path: root/crates
author     Kirill Bulatov <[email protected]>    2019-07-21 22:10:29 +0100
committer  Kirill Bulatov <[email protected]>    2019-07-21 22:10:29 +0100
commit     31aef808d96b779dbc8ce41e27857965e79bd96f (patch)
tree       254d69a1ec3abe6d70b2dd9737ef699f33f88f62 /crates
parent     ba76017d2eb1b7606106c15478ac658dc32b6dbd (diff)
parent     d690249bc81bc265cb3d1836c2922325f4fdb8af (diff)
Merge branch 'master' into add-type-lenses
Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_assists/Cargo.toml                                  |   1
-rw-r--r--  crates/ra_assists/src/add_impl.rs                             |   5
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs                           |   5
-rw-r--r--  crates/ra_assists/src/ast_editor.rs                           |   9
-rw-r--r--  crates/ra_assists/src/introduce_variable.rs                   |   5
-rw-r--r--  crates/ra_assists/src/move_guard.rs                           |   8
-rw-r--r--  crates/ra_hir/src/source_binder.rs                            |   9
-rw-r--r--  crates/ra_hir/src/ty.rs                                       |   2
-rw-r--r--  crates/ra_hir/src/ty/infer.rs                                 |  22
-rw-r--r--  crates/ra_hir/src/ty/tests.rs                                 |   6
-rw-r--r--  crates/ra_ide_api/Cargo.toml                                  |   1
-rw-r--r--  crates/ra_ide_api/src/completion.rs                           |   2
-rw-r--r--  crates/ra_ide_api/src/completion/complete_struct_literal.rs   |  15
-rw-r--r--  crates/ra_ide_api/src/completion/complete_struct_pattern.rs   |  94
-rw-r--r--  crates/ra_ide_api/src/completion/completion_context.rs        |  15
-rw-r--r--  crates/ra_ide_api/src/display/short_label.rs                  |   5
-rw-r--r--  crates/ra_ide_api/src/extend_selection.rs                     |  16
-rw-r--r--  crates/ra_ide_api/src/folding_ranges.rs                       |  18
-rw-r--r--  crates/ra_ide_api/src/goto_type_definition.rs                 |   4
-rw-r--r--  crates/ra_ide_api/src/join_lines.rs                           |   6
-rw-r--r--  crates/ra_ide_api/src/matching_brace.rs                       |   6
-rw-r--r--  crates/ra_ide_api/src/syntax_tree.rs                          |   6
-rw-r--r--  crates/ra_ide_api/src/typing.rs                               |  15
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs                            |  13
-rw-r--r--  crates/ra_mbe/src/tests.rs                                    |   6
-rw-r--r--  crates/ra_syntax/Cargo.toml                                   |   2
-rw-r--r--  crates/ra_syntax/src/algo.rs                                  | 130
-rw-r--r--  crates/ra_syntax/src/ast/expr_extensions.rs                   |  13
-rw-r--r--  crates/ra_syntax/src/ast/extensions.rs                        |   5
-rw-r--r--  crates/ra_syntax/src/lib.rs                                   |  32
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs                     |  13
-rw-r--r--  crates/ra_syntax/src/syntax_node.rs                           | 492
-rw-r--r--  crates/ra_syntax/src/syntax_text.rs                           | 178
-rw-r--r--  crates/ra_syntax/src/validation.rs                            |   6
34 files changed, 372 insertions, 793 deletions
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml
index 5ddac1e48..2113286a3 100644
--- a/crates/ra_assists/Cargo.toml
+++ b/crates/ra_assists/Cargo.toml
@@ -5,6 +5,7 @@ version = "0.1.0"
5authors = ["rust-analyzer developers"] 5authors = ["rust-analyzer developers"]
6 6
7[dependencies] 7[dependencies]
8format-buf = "1.0.0"
8once_cell = "0.2.0" 9once_cell = "0.2.0"
9join_to_string = "0.1.3" 10join_to_string = "0.1.3"
10itertools = "0.8.0" 11itertools = "0.8.0"
diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs
index 59ca88468..4b61f4031 100644
--- a/crates/ra_assists/src/add_impl.rs
+++ b/crates/ra_assists/src/add_impl.rs
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use hir::db::HirDatabase;
 use join_to_string::join;
 use ra_syntax::{
@@ -19,7 +18,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let mut buf = String::new();
         buf.push_str("\n\nimpl");
         if let Some(type_params) = &type_params {
-            write!(buf, "{}", type_params.syntax()).unwrap();
+            format!(buf, "{}", type_params.syntax());
         }
         buf.push_str(" ");
         buf.push_str(name.text().as_str());
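
Aside: the write! -> format! change above comes from the new format-buf dependency added in this merge; as a rough sketch of the pattern (buffer contents here are only illustrative, not part of the patch):

    use format_buf::format;

    fn main() {
        let mut buf = String::from("\n\nimpl");
        // format! appends formatted text to an existing buffer, so the
        // trailing `.unwrap()` on write!'s fmt::Result is no longer needed.
        format!(buf, "{}", "<T>");
        assert_eq!(buf, "\n\nimpl<T>");
    }
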
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 4d5a76de6..a12c3ed54 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -2,8 +2,9 @@ use hir::db::HirDatabase;
 use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::{find_covering_element, find_node_at_offset},
     AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    TokenAtOffset,
 };
 use ra_text_edit::TextEditBuilder;
 
@@ -105,7 +106,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
     }
 
     pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
-        find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
+        self.source_file.syntax().token_at_offset(self.frange.range.start())
     }
 
     pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index ab6c347ad..95b871b30 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -4,7 +4,10 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
+    algo::{insert_children, replace_children},
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement,
+    SyntaxKind::*,
+    T,
 };
 use ra_text_edit::TextEditBuilder;
 
@@ -38,7 +41,7 @@ impl<N: AstNode> AstEditor<N> {
         position: InsertPosition<SyntaxElement>,
         to_insert: impl Iterator<Item = SyntaxElement>,
     ) -> N {
-        let new_syntax = self.ast().syntax().insert_children(position, to_insert);
+        let new_syntax = insert_children(self.ast().syntax(), position, to_insert);
         N::cast(new_syntax).unwrap()
     }
 
@@ -48,7 +51,7 @@ impl<N: AstNode> AstEditor<N> {
         to_delete: RangeInclusive<SyntaxElement>,
         to_insert: impl Iterator<Item = SyntaxElement>,
     ) -> N {
-        let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
+        let new_syntax = replace_children(self.ast().syntax(), to_delete, to_insert);
         N::cast(new_syntax).unwrap()
     }
 
diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs
index 911de2d48..5eb708310 100644
--- a/crates/ra_assists/src/introduce_variable.rs
+++ b/crates/ra_assists/src/introduce_variable.rs
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use hir::db::HirDatabase;
 use ra_syntax::{
     ast::{self, AstNode},
@@ -37,7 +36,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
37 buf.push_str("let var_name = "); 36 buf.push_str("let var_name = ");
38 TextUnit::of_str("let ") 37 TextUnit::of_str("let ")
39 }; 38 };
40 write!(buf, "{}", expr.syntax()).unwrap(); 39 format!(buf, "{}", expr.syntax());
41 let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone()); 40 let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
42 let is_full_stmt = if let Some(expr_stmt) = &full_stmt { 41 let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
43 Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone()) 42 Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())
diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs
index 0f3cdbe53..127c9e068 100644
--- a/crates/ra_assists/src/move_guard.rs
+++ b/crates/ra_assists/src/move_guard.rs
@@ -2,7 +2,7 @@ use hir::db::HirDatabase;
 use ra_syntax::{
     ast,
     ast::{AstNode, AstToken, IfExpr, MatchArm},
-    SyntaxElement, TextUnit,
+    TextUnit,
 };
 
 use crate::{Assist, AssistCtx, AssistId};
@@ -18,10 +18,10 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
 
     ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
         edit.target(guard.syntax().text_range());
-        let offseting_amount = match &space_before_guard {
-            Some(SyntaxElement::Token(tok)) => {
+        let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) {
+            Some(tok) => {
                 if let Some(_) = ast::Whitespace::cast(tok.clone()) {
-                    let ele = space_before_guard.unwrap().text_range();
+                    let ele = tok.text_range();
                     edit.delete(ele);
                     ele.len()
                 } else {
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 4c173a4f7..fc9bc33d2 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -266,9 +266,14 @@ impl SourceAnalyzer {
         self.infer.as_ref()?.field_resolution(expr_id)
     }
 
-    pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> {
+    pub fn resolve_struct_literal(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> {
         let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?;
-        self.infer.as_ref()?.variant_resolution(expr_id)
+        self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
+    }
+
+    pub fn resolve_struct_pattern(&self, struct_pat: &ast::StructPat) -> Option<crate::VariantDef> {
+        let pat_id = self.body_source_map.as_ref()?.node_pat(&struct_pat.clone().into())?;
+        self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
     }
 
     pub fn resolve_macro_call(
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index 4cf714f5d..82589e504 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -472,7 +472,7 @@ impl Ty {
 
     /// Returns the type parameters of this type if it has some (i.e. is an ADT
     /// or function); so if `self` is `Option<u32>`, this returns the `u32`.
-    fn substs(&self) -> Option<Substs> {
+    pub fn substs(&self) -> Option<Substs> {
         match self {
             Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()),
             _ => None,
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs
index a82dff711..594c5bc79 100644
--- a/crates/ra_hir/src/ty/infer.rs
+++ b/crates/ra_hir/src/ty/infer.rs
@@ -113,7 +113,8 @@ pub struct InferenceResult {
     method_resolutions: FxHashMap<ExprId, Function>,
     /// For each field access expr, records the field it resolves to.
     field_resolutions: FxHashMap<ExprId, StructField>,
-    variant_resolutions: FxHashMap<ExprId, VariantDef>,
+    /// For each struct literal, records the variant it resolves to.
+    variant_resolutions: FxHashMap<ExprOrPatId, VariantDef>,
     /// For each associated item record what it resolves to
     assoc_resolutions: FxHashMap<ExprOrPatId, ImplItem>,
     diagnostics: Vec<InferenceDiagnostic>,
@@ -128,8 +129,11 @@ impl InferenceResult {
     pub fn field_resolution(&self, expr: ExprId) -> Option<StructField> {
         self.field_resolutions.get(&expr).copied()
     }
-    pub fn variant_resolution(&self, expr: ExprId) -> Option<VariantDef> {
-        self.variant_resolutions.get(&expr).copied()
+    pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantDef> {
+        self.variant_resolutions.get(&id.into()).copied()
+    }
+    pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantDef> {
+        self.variant_resolutions.get(&id.into()).copied()
     }
     pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<ImplItem> {
         self.assoc_resolutions.get(&id.into()).copied()
@@ -218,8 +222,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         self.result.field_resolutions.insert(expr, field);
     }
 
-    fn write_variant_resolution(&mut self, expr: ExprId, variant: VariantDef) {
-        self.result.variant_resolutions.insert(expr, variant);
+    fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantDef) {
+        self.result.variant_resolutions.insert(id, variant);
     }
 
     fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: ImplItem) {
@@ -678,8 +682,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
678 subpats: &[FieldPat], 682 subpats: &[FieldPat],
679 expected: &Ty, 683 expected: &Ty,
680 default_bm: BindingMode, 684 default_bm: BindingMode,
685 id: PatId,
681 ) -> Ty { 686 ) -> Ty {
682 let (ty, def) = self.resolve_variant(path); 687 let (ty, def) = self.resolve_variant(path);
688 if let Some(variant) = def {
689 self.write_variant_resolution(id.into(), variant);
690 }
683 691
684 self.unify(&ty, expected); 692 self.unify(&ty, expected);
685 693
@@ -762,7 +770,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm)
             }
             Pat::Struct { path: ref p, args: ref fields } => {
-                self.infer_struct_pat(p.as_ref(), fields, expected, default_bm)
+                self.infer_struct_pat(p.as_ref(), fields, expected, default_bm, pat)
             }
             Pat::Path(path) => {
                 // FIXME use correct resolver for the surrounding expression
@@ -1064,7 +1072,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             Expr::StructLit { path, fields, spread } => {
                 let (ty, def_id) = self.resolve_variant(path.as_ref());
                 if let Some(variant) = def_id {
-                    self.write_variant_resolution(tgt_expr, variant);
+                    self.write_variant_resolution(tgt_expr.into(), variant);
                 }
 
                 let substs = ty.substs().unwrap_or_else(Substs::empty);
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index 706500484..676711d0a 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -3211,8 +3211,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     );
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
@@ -3232,8 +3231,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
 
     {
         let file = db.parse(pos.file_id).ok().unwrap();
-        let node =
-            algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
+        let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
             SourceAnalyzer::new(&db, pos.file_id, &node, None);
         });
diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml
index c49a05de1..78a3db14d 100644
--- a/crates/ra_ide_api/Cargo.toml
+++ b/crates/ra_ide_api/Cargo.toml
@@ -5,6 +5,7 @@ version = "0.1.0"
5authors = ["rust-analyzer developers"] 5authors = ["rust-analyzer developers"]
6 6
7[dependencies] 7[dependencies]
8format-buf = "1.0.0"
8itertools = "0.8.0" 9itertools = "0.8.0"
9join_to_string = "0.1.3" 10join_to_string = "0.1.3"
10log = "0.4.5" 11log = "0.4.5"
diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs
index c23b5da59..85160358a 100644
--- a/crates/ra_ide_api/src/completion.rs
+++ b/crates/ra_ide_api/src/completion.rs
@@ -4,6 +4,7 @@ mod presentation;
 
 mod complete_dot;
 mod complete_struct_literal;
+mod complete_struct_pattern;
 mod complete_pattern;
 mod complete_fn_param;
 mod complete_keyword;
@@ -65,6 +66,7 @@ pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Opti
     complete_scope::complete_scope(&mut acc, &ctx);
     complete_dot::complete_dot(&mut acc, &ctx);
     complete_struct_literal::complete_struct_literal(&mut acc, &ctx);
+    complete_struct_pattern::complete_struct_pattern(&mut acc, &ctx);
     complete_pattern::complete_pattern(&mut acc, &ctx);
     complete_postfix::complete_postfix(&mut acc, &ctx);
     Some(acc)
diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
index 9410f740f..6aa41f498 100644
--- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs
+++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
@@ -1,23 +1,22 @@
-use hir::{Substs, Ty};
+use hir::Substs;
 
 use crate::completion::{CompletionContext, Completions};
 
 /// Complete fields in fields literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
-        Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
+        Some((
+            ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
+            ctx.analyzer.resolve_struct_literal(it)?,
+        ))
     }) {
         Some(it) => it,
         _ => return,
     };
-
-    let ty_substs = match ty {
-        Ty::Apply(it) => it.parameters,
-        _ => Substs::empty(),
-    };
+    let substs = &ty.substs().unwrap_or_else(Substs::empty);
 
     for field in variant.fields(ctx.db) {
-        acc.add_field(ctx, field, &ty_substs);
+        acc.add_field(ctx, field, substs);
     }
 }
 
diff --git a/crates/ra_ide_api/src/completion/complete_struct_pattern.rs b/crates/ra_ide_api/src/completion/complete_struct_pattern.rs
new file mode 100644
index 000000000..d0dde5930
--- /dev/null
+++ b/crates/ra_ide_api/src/completion/complete_struct_pattern.rs
@@ -0,0 +1,94 @@
+use hir::Substs;
+
+use crate::completion::{CompletionContext, Completions};
+
+pub(super) fn complete_struct_pattern(acc: &mut Completions, ctx: &CompletionContext) {
+    let (ty, variant) = match ctx.struct_lit_pat.as_ref().and_then(|it| {
+        Some((
+            ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
+            ctx.analyzer.resolve_struct_pattern(it)?,
+        ))
+    }) {
+        Some(it) => it,
+        _ => return,
+    };
+    let substs = &ty.substs().unwrap_or_else(Substs::empty);
+
+    for field in variant.fields(ctx.db) {
+        acc.add_field(ctx, field, substs);
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::completion::{do_completion, CompletionItem, CompletionKind};
+    use insta::assert_debug_snapshot_matches;
+
+    fn complete(code: &str) -> Vec<CompletionItem> {
+        do_completion(code, CompletionKind::Reference)
+    }
+
+    #[test]
+    fn test_struct_pattern_field() {
+        let completions = complete(
+            r"
+            struct S { foo: u32 }
+
+            fn process(f: S) {
+                match f {
+                    S { f<|>: 92 } => (),
+                }
+            }
+            ",
+        );
+        assert_debug_snapshot_matches!(completions, @r###"
+       ⋮[
+       ⋮    CompletionItem {
+       ⋮        label: "foo",
+       ⋮        source_range: [117; 118),
+       ⋮        delete: [117; 118),
+       ⋮        insert: "foo",
+       ⋮        kind: Field,
+       ⋮        detail: "u32",
+       ⋮    },
+       ⋮]
+        "###);
+    }
+
+    #[test]
+    fn test_struct_pattern_enum_variant() {
+        let completions = complete(
+            r"
+            enum E {
+                S { foo: u32, bar: () }
+            }
+
+            fn process(e: E) {
+                match e {
+                    E::S { <|> } => (),
+                }
+            }
+            ",
+        );
+        assert_debug_snapshot_matches!(completions, @r###"
+       ⋮[
+       ⋮    CompletionItem {
+       ⋮        label: "bar",
+       ⋮        source_range: [161; 161),
+       ⋮        delete: [161; 161),
+       ⋮        insert: "bar",
+       ⋮        kind: Field,
+       ⋮        detail: "()",
+       ⋮    },
+       ⋮    CompletionItem {
+       ⋮        label: "foo",
+       ⋮        source_range: [161; 161),
+       ⋮        delete: [161; 161),
+       ⋮        insert: "foo",
+       ⋮        kind: Field,
+       ⋮        detail: "u32",
+       ⋮    },
+       ⋮]
+        "###);
+    }
+}
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index 2f78d5409..dfaa9ce69 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -1,6 +1,6 @@
 use hir::source_binder;
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset},
+    algo::{find_covering_element, find_node_at_offset},
     ast, AstNode, Parse, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
@@ -21,6 +21,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) function_syntax: Option<ast::FnDef>,
     pub(super) use_item_syntax: Option<ast::UseItem>,
     pub(super) struct_lit_syntax: Option<ast::StructLit>,
+    pub(super) struct_lit_pat: Option<ast::StructPat>,
     pub(super) is_param: bool,
     /// If a name-binding or reference to a const in a pattern.
     /// Irrefutable patterns (like let) are excluded.
@@ -48,7 +49,7 @@ impl<'a> CompletionContext<'a> {
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
         let token =
-            find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
+            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
@@ -60,6 +61,7 @@ impl<'a> CompletionContext<'a> {
             function_syntax: None,
             use_item_syntax: None,
             struct_lit_syntax: None,
+            struct_lit_pat: None,
             is_param: false,
             is_pat_binding: false,
             is_trivial_path: false,
@@ -106,8 +108,7 @@ impl<'a> CompletionContext<'a> {
         // Otherwise, see if this is a declaration. We can use heuristics to
         // suggest declaration names, see `CompletionKind::Magic`.
         if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
-            if is_node::<ast::BindPat>(name.syntax()) {
-                let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
+            if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                 let parent = bind_pat.syntax().parent();
                 if parent.clone().and_then(ast::MatchArm::cast).is_some()
                     || parent.and_then(ast::Condition::cast).is_some()
@@ -119,6 +120,10 @@ impl<'a> CompletionContext<'a> {
                 self.is_param = true;
                 return;
             }
+            if name.syntax().ancestors().find_map(ast::FieldPatList::cast).is_some() {
+                self.struct_lit_pat =
+                    find_node_at_offset(original_parse.tree().syntax(), self.offset);
+            }
         }
     }
 
@@ -235,7 +240,7 @@ fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Op
 }
 
 fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
-    match node.ancestors().filter_map(N::cast).next() {
+    match node.ancestors().find_map(N::cast) {
         None => false,
         Some(n) => n.syntax().text_range() == node.text_range(),
     }
diff --git a/crates/ra_ide_api/src/display/short_label.rs b/crates/ra_ide_api/src/display/short_label.rs
index be499e485..825a033ee 100644
--- a/crates/ra_ide_api/src/display/short_label.rs
+++ b/crates/ra_ide_api/src/display/short_label.rs
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner};
 
 pub(crate) trait ShortLabel {
@@ -73,7 +72,7 @@ where
     let mut buf = short_label_from_node(node, prefix)?;
 
     if let Some(type_ref) = node.ascribed_type() {
-        write!(buf, ": {}", type_ref.syntax()).unwrap();
+        format!(buf, ": {}", type_ref.syntax());
     }
 
     Some(buf)
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs
index 140820df6..edbf622c1 100644
--- a/crates/ra_ide_api/src/extend_selection.rs
+++ b/crates/ra_ide_api/src/extend_selection.rs
@@ -1,10 +1,10 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    algo::find_covering_element,
     ast::{self, AstNode, AstToken},
-    Direction, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
 use crate::{db::RootDatabase, FileRange};
@@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 
     if range.is_empty() {
         let offset = range.start();
-        let mut leaves = find_token_at_offset(root, offset);
+        let mut leaves = root.token_at_offset(offset);
         if leaves.clone().all(|it| it.kind() == WHITESPACE) {
             return Some(extend_ws(root, leaves.next()?, offset));
         }
@@ -53,7 +53,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         return Some(leaf_range);
     };
     let node = match find_covering_element(root, range) {
-        SyntaxElement::Token(token) => {
+        NodeOrToken::Token(token) => {
             if token.text_range() != range {
                 return Some(token.text_range());
             }
@@ -64,7 +64,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
             }
             token.parent()
         }
-        SyntaxElement::Node(node) => node,
+        NodeOrToken::Node(node) => node,
     };
     if node.text_range() != range {
         return Some(node.text_range());
@@ -153,8 +153,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
     node.siblings_with_tokens(dir)
         .skip(1)
         .skip_while(|node| match node {
-            SyntaxElement::Node(_) => false,
-            SyntaxElement::Token(it) => is_single_line_ws(it),
+            NodeOrToken::Node(_) => false,
+            NodeOrToken::Token(it) => is_single_line_ws(it),
         })
         .next()
         .and_then(|it| it.into_token())
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs
index 571d1c595..e60ae8cf6 100644
--- a/crates/ra_ide_api/src/folding_ranges.rs
+++ b/crates/ra_ide_api/src/folding_ranges.rs
@@ -2,7 +2,7 @@ use rustc_hash::FxHashSet;
 
 use ra_syntax::{
     ast::{self, AstNode, AstToken, VisibilityOwner},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, *},
     SyntaxNode, TextRange,
 };
@@ -31,8 +31,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(element.kind()) {
             let is_multiline = match &element {
-                SyntaxElement::Node(node) => node.text().contains_char('\n'),
-                SyntaxElement::Token(token) => token.text().contains('\n'),
+                NodeOrToken::Node(node) => node.text().contains_char('\n'),
+                NodeOrToken::Token(token) => token.text().contains('\n'),
             };
             if is_multiline {
                 res.push(Fold { range: element.text_range(), kind });
@@ -41,7 +41,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         }
 
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 // Fold groups of comments
                 if let Some(comment) = ast::Comment::cast(token) {
                     if !visited_comments.contains(&comment) {
@@ -53,7 +53,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
                     }
                 }
             }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                 // Fold groups of imports
                 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
                     if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
@@ -108,7 +108,7 @@ fn contiguous_range_for_group_unless(
     let mut last = first.clone();
     for element in first.siblings_with_tokens(Direction::Next) {
         let node = match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(ws) = ast::Whitespace::cast(token) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines
@@ -119,7 +119,7 @@ fn contiguous_range_for_group_unless(
                 // group ends here
                 break;
             }
-            SyntaxElement::Node(node) => node,
+            NodeOrToken::Node(node) => node,
         };
 
         // Stop if we find a node that doesn't belong to the group
@@ -154,7 +154,7 @@ fn contiguous_range_for_comment(
     let mut last = first.clone();
     for element in first.syntax().siblings_with_tokens(Direction::Next) {
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(ws) = ast::Whitespace::cast(token.clone()) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines
@@ -173,7 +173,7 @@ fn contiguous_range_for_comment(
                 // * A comment of a different flavor was reached
                 break;
             }
-            SyntaxElement::Node(_) => break,
+            NodeOrToken::Node(_) => break,
         };
     }
 
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs
index 007259d9e..72884e5ca 100644
--- a/crates/ra_ide_api/src/goto_type_definition.rs
+++ b/crates/ra_ide_api/src/goto_type_definition.rs
@@ -1,5 +1,5 @@
 use ra_db::SourceDatabase;
-use ra_syntax::{algo::find_token_at_offset, ast, AstNode};
+use ra_syntax::{ast, AstNode};
 
 use crate::{db::RootDatabase, FilePosition, NavigationTarget, RangeInfo};
 
@@ -9,7 +9,7 @@ pub(crate) fn goto_type_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
 
-    let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| {
+    let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| {
         token
             .parent()
             .ancestors()
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs
index 7f25f2108..a2e4b6f3c 100644
--- a/crates/ra_ide_api/src/join_lines.rs
+++ b/crates/ra_ide_api/src/join_lines.rs
@@ -3,7 +3,7 @@ use ra_fmt::{compute_ws, extract_trivial_expression};
 use ra_syntax::{
     algo::{find_covering_element, non_trivia_sibling},
     ast::{self, AstNode, AstToken},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, WHITESPACE},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
 };
@@ -23,8 +23,8 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
     };
 
     let node = match find_covering_element(file.syntax(), range) {
-        SyntaxElement::Node(node) => node,
-        SyntaxElement::Token(token) => token.parent(),
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
     };
     let mut edit = TextEditBuilder::default();
     for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs
index 1e2fac848..e802d01e4 100644
--- a/crates/ra_ide_api/src/matching_brace.rs
+++ b/crates/ra_ide_api/src/matching_brace.rs
@@ -1,9 +1,11 @@
-use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
+use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
 
 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
-    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs
index 76c50f6d6..a07e670fa 100644
--- a/crates/ra_ide_api/src/syntax_tree.rs
+++ b/crates/ra_ide_api/src/syntax_tree.rs
@@ -1,7 +1,7 @@
 use crate::db::RootDatabase;
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo, AstNode, SourceFile, SyntaxElement,
+    algo, AstNode, NodeOrToken, SourceFile,
     SyntaxKind::{RAW_STRING, STRING},
     SyntaxToken, TextRange,
 };
@@ -16,8 +16,8 @@ pub(crate) fn syntax_tree(
     let parse = db.parse(file_id);
     if let Some(text_range) = text_range {
         let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
-            SyntaxElement::Node(node) => node,
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => {
                 if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                     return tree;
                 }
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs
index 5a1cbcc49..6b3fd5904 100644
--- a/crates/ra_ide_api/src/typing.rs
+++ b/crates/ra_ide_api/src/typing.rs
@@ -1,11 +1,11 @@
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::find_node_at_offset,
     ast::{self, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
-    SyntaxToken, TextRange, TextUnit,
+    SyntaxToken, TextRange, TextUnit, TokenAtOffset,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
 
@@ -14,7 +14,9 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
-    let comment = find_token_at_offset(file.syntax(), position.offset)
+    let comment = file
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
 
@@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
 }
 
 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-    let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
+    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
         TokenAtOffset::Between(l, r) => {
             assert!(r == *token);
             l
@@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let parse = db.parse(position.file_id);
     assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.'));
 
-    let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset)
+    let whitespace = parse
+        .tree()
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Whitespace::cast)?;
 
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 7ff0fc472..8225759e7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -1,12 +1,13 @@
-use crate::subtree_source::SubtreeTokenSource;
-use crate::ExpandError;
 use ra_parser::{ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
+use crate::subtree_source::SubtreeTokenSource;
+use crate::ExpandError;
+
 /// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Default)]
 pub struct TokenMap {
@@ -200,7 +201,7 @@ fn convert_tt(
         }
 
         match child {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(doc_tokens) = convert_doc_comment(&token) {
                     token_trees.extend(doc_tokens);
                 } else if token.kind().is_trivia() {
@@ -210,7 +211,7 @@ fn convert_tt(
                     let char = token.text().chars().next().unwrap();
 
                     let spacing = match child_iter.peek() {
-                        Some(SyntaxElement::Token(token)) => {
+                        Some(NodeOrToken::Token(token)) => {
                             if token.kind().is_punct() {
                                 tt::Spacing::Joint
                             } else {
@@ -241,7 +242,7 @@ fn convert_tt(
                     token_trees.push(child);
                 }
             }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                 let child = convert_tt(token_map, global_offset, &node)?.into();
                 token_trees.push(child);
             }
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 9151b6ecd..192e9007d 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1,4 +1,4 @@
-use ra_syntax::{ast, AstNode};
+use ra_syntax::{ast, AstNode, NodeOrToken};
 
 use super::*;
 
@@ -118,11 +118,11 @@ pub fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
         match event {
             WalkEvent::Enter(element) => {
                 match element {
-                    ra_syntax::SyntaxElement::Node(node) => {
+                    NodeOrToken::Node(node) => {
                         indent!();
                         writeln!(buf, "{:?}", node.kind()).unwrap();
                     }
-                    ra_syntax::SyntaxElement::Token(token) => match token.kind() {
+                    NodeOrToken::Token(token) => match token.kind() {
                         ra_syntax::SyntaxKind::WHITESPACE => {}
                         _ => {
                             indent!();
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index a5565de33..97b6b047f 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 [dependencies]
 unicode-xid = "0.1.0"
 itertools = "0.8.0"
-rowan = "0.5.6"
+rowan = "0.6.0-pre.1"
 
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index f47e11e66..45f624810 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -1,18 +1,12 @@
 pub mod visit;
 
-use itertools::Itertools;
-
-use crate::{AstNode, Direction, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit};
+use std::ops::RangeInclusive;
 
-pub use rowan::TokenAtOffset;
+use itertools::Itertools;
 
-pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
-    match node.0.token_at_offset(offset) {
-        TokenAtOffset::None => TokenAtOffset::None,
-        TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
-        TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
-    }
-}
+use crate::{
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+};
 
 /// Returns ancestors of the node at the offset, sorted by length. This should
 /// do the right thing at an edge, e.g. when searching for expressions at `{
@@ -23,7 +17,7 @@ pub fn ancestors_at_offset(
     node: &SyntaxNode,
     offset: TextUnit,
 ) -> impl Iterator<Item = SyntaxNode> {
-    find_token_at_offset(node, offset)
+    node.token_at_offset(offset)
         .map(|token| token.parent().ancestors())
         .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
 }
@@ -44,20 +38,118 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
 /// Finds the first sibling in the given direction which is not `trivia`
 pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
     return match element {
-        SyntaxElement::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
-        SyntaxElement::Token(token) => {
-            token.siblings_with_tokens(direction).skip(1).find(not_trivia)
-        }
+        NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+        NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
     };
 
     fn not_trivia(element: &SyntaxElement) -> bool {
         match element {
-            SyntaxElement::Node(_) => true,
-            SyntaxElement::Token(token) => !token.kind().is_trivia(),
+            NodeOrToken::Node(_) => true,
+            NodeOrToken::Token(token) => !token.kind().is_trivia(),
         }
     }
 }
 
 pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
-    SyntaxElement::new(root.0.covering_node(range))
+    root.covering_element(range)
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum InsertPosition<T> {
+    First,
+    Last,
+    Before(T),
+    After(T),
+}
+
+/// Adds specified children (tokens or nodes) to the current node at the
+/// specific position.
+///
+/// This is a type-unsafe low-level editing API, if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn insert_children(
+    parent: &SyntaxNode,
+    position: InsertPosition<SyntaxElement>,
+    to_insert: impl Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut delta = TextUnit::default();
+    let to_insert = to_insert.map(|element| {
+        delta += element.text_range().len();
+        to_green_element(element)
+    });
+
+    let old_children = parent.green().children();
+
+    let new_children = match &position {
+        InsertPosition::First => {
+            to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
+        }
+        InsertPosition::Last => old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>(),
+        InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
+            let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
+            let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
+            let (before, after) = old_children.split_at(split_at);
+            before
+                .iter()
+                .cloned()
+                .chain(to_insert)
+                .chain(after.iter().cloned())
+                .collect::<Box<[_]>>()
+        }
+    };
+
+    with_children(parent, new_children)
+}
+
+/// Replaces all nodes in `to_delete` with nodes from `to_insert`
+///
+/// This is a type-unsafe low-level editing API, if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn replace_children(
+    parent: &SyntaxNode,
+    to_delete: RangeInclusive<SyntaxElement>,
+    to_insert: impl Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let start = position_of_child(parent, to_delete.start().clone());
+    let end = position_of_child(parent, to_delete.end().clone());
+    let old_children = parent.green().children();
+
+    let new_children = old_children[..start]
+        .iter()
+        .cloned()
+        .chain(to_insert.map(to_green_element))
+        .chain(old_children[end + 1..].iter().cloned())
+        .collect::<Box<[_]>>();
+    with_children(parent, new_children)
+}
+
+fn with_children(
+    parent: &SyntaxNode,
+    new_children: Box<[NodeOrToken<rowan::GreenNode, rowan::GreenToken>]>,
+) -> SyntaxNode {
+    let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
+    let new_node =
+        rowan::GreenNode::new(rowan::cursor::SyntaxKind(parent.kind() as u16), new_children);
+    let new_root_node = parent.replace_with(new_node);
+    let new_root_node = SyntaxNode::new_root(new_root_node);
+
+    // FIXME: use a more elegant way to re-fetch the node (#1185), make
+    // `range` private afterwards
+    let mut ptr = SyntaxNodePtr::new(parent);
+    ptr.range = TextRange::offset_len(ptr.range().start(), len);
+    ptr.to_node(&new_root_node)
+}
+
+fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
+    parent
+        .children_with_tokens()
+        .position(|it| it == child)
+        .expect("element is not a child of current element")
+}
+
+fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
+    match element {
+        NodeOrToken::Node(it) => it.green().clone().into(),
+        NodeOrToken::Token(it) => it.green().clone().into(),
+    }
 }
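
Aside: the token_at_offset inherent method that replaces algo::find_token_at_offset throughout this merge can be exercised as below. This is a hedged sketch against the post-merge API only; the source text and the offset 13 are made up for illustration:

    use ra_syntax::{AstNode, SourceFile, TextUnit};

    fn main() {
        let parse = SourceFile::parse("fn main() { let x = 92; }");
        let file = parse.tree();
        // token_at_offset now comes straight from rowan's SyntaxNode and
        // returns a TokenAtOffset that can be left- or right-biased.
        let token =
            file.syntax().token_at_offset(TextUnit::from(13)).right_biased().unwrap();
        println!("{:?} {:?}", token.kind(), token.text_range());
    }
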
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs
index 139bd3ec0..f9190d877 100644
--- a/crates/ra_syntax/src/ast/expr_extensions.rs
+++ b/crates/ra_syntax/src/ast/expr_extensions.rs
@@ -2,7 +2,7 @@
 
 use crate::{
     ast::{self, child_opt, children, AstChildren, AstNode},
-    SmolStr, SyntaxElement,
+    SmolStr,
     SyntaxKind::*,
     SyntaxToken, T,
 };
@@ -229,14 +229,11 @@ pub enum LiteralKind {
 
 impl ast::Literal {
     pub fn token(&self) -> SyntaxToken {
-        let elem = self
-            .syntax()
+        self.syntax()
             .children_with_tokens()
-            .find(|e| e.kind() != ATTR && !e.kind().is_trivia());
-        match elem {
-            Some(SyntaxElement::Token(token)) => token,
-            _ => unreachable!(),
-        }
+            .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+            .and_then(|e| e.into_token())
+            .unwrap()
     }
 
     pub fn kind(&self) -> LiteralKind {
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index 753fc42c6..d4873b39a 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -24,10 +24,7 @@ impl ast::NameRef {
 }
 
 fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
-    match node.0.green().children().first() {
-        Some(rowan::GreenElement::Token(it)) => it.text(),
-        _ => panic!(),
-    }
+    node.green().children().first().and_then(|it| it.as_token()).unwrap().text()
 }
 
 impl ast::Attr {
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index 8af04c136..7f69b86e1 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -20,7 +20,6 @@
 //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
 
 mod syntax_node;
-mod syntax_text;
 mod syntax_error;
 mod parsing;
 mod validation;
@@ -38,19 +37,17 @@ use ra_text_edit::AtomTextEdit;
 use crate::syntax_node::GreenNode;
 
 pub use crate::{
+    algo::InsertPosition,
     ast::{AstNode, AstToken},
     parsing::{classify_literal, tokenize, Token},
     ptr::{AstPtr, SyntaxNodePtr},
     syntax_error::{Location, SyntaxError, SyntaxErrorKind},
     syntax_node::{
-        Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
-        WalkEvent,
+        Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
     },
-    syntax_text::SyntaxText,
 };
-pub use ra_parser::SyntaxKind;
-pub use ra_parser::T;
-pub use rowan::{SmolStr, TextRange, TextUnit};
+pub use ra_parser::{SyntaxKind, T};
+pub use rowan::{SmolStr, SyntaxText, TextRange, TextUnit, TokenAtOffset, WalkEvent};
 
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
@@ -76,7 +73,7 @@ impl<T> Parse<T> {
76 } 73 }
77 74
78 pub fn syntax_node(&self) -> SyntaxNode { 75 pub fn syntax_node(&self) -> SyntaxNode {
79 SyntaxNode::new(self.green.clone()) 76 SyntaxNode::new_root(self.green.clone())
80 } 77 }
81} 78}
82 79
@@ -146,18 +143,17 @@ impl Parse<SourceFile> {
146pub use crate::ast::SourceFile; 143pub use crate::ast::SourceFile;
147 144
148impl SourceFile { 145impl SourceFile {
149 fn new(green: GreenNode) -> SourceFile { 146 pub fn parse(text: &str) -> Parse<SourceFile> {
150 let root = SyntaxNode::new(green); 147 let (green, mut errors) = parsing::parse_text(text);
148 let root = SyntaxNode::new_root(green.clone());
149
151 if cfg!(debug_assertions) { 150 if cfg!(debug_assertions) {
152 validation::validate_block_structure(&root); 151 validation::validate_block_structure(&root);
153 } 152 }
154 assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
155 SourceFile::cast(root).unwrap()
156 }
157 153
158 pub fn parse(text: &str) -> Parse<SourceFile> { 154 errors.extend(validation::validate(&root));
159 let (green, mut errors) = parsing::parse_text(text); 155
160 errors.extend(validation::validate(&SourceFile::new(green.clone()))); 156 assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
161 Parse { green, errors: Arc::new(errors), _ty: PhantomData } 157 Parse { green, errors: Arc::new(errors), _ty: PhantomData }
162 } 158 }
163} 159}
@@ -267,8 +263,8 @@ fn api_walkthrough() {
267 match event { 263 match event {
268 WalkEvent::Enter(node) => { 264 WalkEvent::Enter(node) => {
269 let text = match &node { 265 let text = match &node {
270 SyntaxElement::Node(it) => it.text().to_string(), 266 NodeOrToken::Node(it) => it.text().to_string(),
271 SyntaxElement::Token(it) => it.text().to_string(), 267 NodeOrToken::Token(it) => it.text().to_string(),
272 }; 268 };
273 buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent); 269 buf += &format!("{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
274 indent += 2; 270 indent += 2;
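
SourceFile::parse now builds the tree through SyntaxNode::new_root and validates the bare SyntaxNode, while the walkthrough switches to rowan's NodeOrToken. A small usage sketch combining both, relying only on items visible in this hunk:

    use ra_syntax::{NodeOrToken, SourceFile, WalkEvent};

    // Parse a file and dump every node and token with indentation, in the same
    // shape as the api_walkthrough snippet above.
    fn dump_tree(text: &str) -> String {
        let parse = SourceFile::parse(text);
        let root = parse.syntax_node();
        let mut buf = String::new();
        let mut indent = 0usize;
        for event in root.preorder_with_tokens() {
            match event {
                WalkEvent::Enter(element) => {
                    let text = match &element {
                        NodeOrToken::Node(it) => it.text().to_string(),
                        NodeOrToken::Token(it) => it.text().to_string(),
                    };
                    buf += &format!("{:indent$}{:?} {:?}\n", "", text, element.kind(), indent = indent);
                    indent += 2;
                }
                WalkEvent::Leave(_) => indent -= 2,
            }
        }
        buf
    }
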
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index 2f388bdfe..65b8aa10d 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -16,7 +16,7 @@ use crate::{
16 text_token_source::TextTokenSource, 16 text_token_source::TextTokenSource,
17 text_tree_sink::TextTreeSink, 17 text_tree_sink::TextTreeSink,
18 }, 18 },
19 syntax_node::{GreenNode, GreenToken, SyntaxElement, SyntaxNode}, 19 syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
20 SyntaxError, 20 SyntaxError,
21 SyntaxKind::*, 21 SyntaxKind::*,
22 TextRange, TextUnit, T, 22 TextRange, TextUnit, T,
@@ -70,7 +70,8 @@ fn reparse_token<'node>(
70 } 70 }
71 } 71 }
72 72
73 let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into()); 73 let new_token =
74 GreenToken::new(rowan::cursor::SyntaxKind(token.kind().into()), text.into());
74 Some((token.replace_with(new_token), token.text_range())) 75 Some((token.replace_with(new_token), token.text_range()))
75 } 76 }
76 _ => None, 77 _ => None,
@@ -98,8 +99,8 @@ fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String {
98 let edit = 99 let edit =
99 AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); 100 AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone());
100 let text = match element { 101 let text = match element {
101 SyntaxElement::Token(token) => token.text().to_string(), 102 NodeOrToken::Token(token) => token.text().to_string(),
102 SyntaxElement::Node(node) => node.text().to_string(), 103 NodeOrToken::Node(node) => node.text().to_string(),
103 }; 104 };
104 edit.apply(text) 105 edit.apply(text)
105} 106}
@@ -114,8 +115,8 @@ fn is_contextual_kw(text: &str) -> bool {
114fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { 115fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
115 let node = algo::find_covering_element(node, range); 116 let node = algo::find_covering_element(node, range);
116 let mut ancestors = match node { 117 let mut ancestors = match node {
117 SyntaxElement::Token(it) => it.parent().ancestors(), 118 NodeOrToken::Token(it) => it.parent().ancestors(),
118 SyntaxElement::Node(it) => it.ancestors(), 119 NodeOrToken::Node(it) => it.ancestors(),
119 }; 120 };
120 ancestors.find_map(|node| { 121 ancestors.find_map(|node| {
121 let first_child = node.first_child_or_token().map(|it| it.kind()); 122 let first_child = node.first_child_or_token().map(|it| it.kind());
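
find_reparsable_node now normalizes the covering element into an ancestor iterator via NodeOrToken. A hedged external sketch of the same shape through the public algo::find_covering_element; the BLOCK filter is only an example, not what the reparser itself checks:

    use ra_syntax::{algo, NodeOrToken, SyntaxKind, SyntaxNode, TextRange};

    // Smallest BLOCK node whose range covers `range`, using the same
    // token-to-parent normalization as find_reparsable_node above.
    fn covering_block(root: &SyntaxNode, range: TextRange) -> Option<SyntaxNode> {
        let element = algo::find_covering_element(root, range);
        let mut ancestors = match element {
            NodeOrToken::Token(it) => it.parent().ancestors(),
            NodeOrToken::Node(it) => it.ancestors(),
        };
        ancestors.find(|node| node.kind() == SyntaxKind::BLOCK)
    }
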
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs
index c42045d77..b2f5b8c64 100644
--- a/crates/ra_syntax/src/syntax_node.rs
+++ b/crates/ra_syntax/src/syntax_node.rs
@@ -6,487 +6,37 @@
6//! The *real* implementation is in the (language-agnostic) `rowan` crate, this 6//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
7//! modules just wraps its API. 7//! modules just wraps its API.
8 8
9use std::{fmt, iter::successors, ops::RangeInclusive};
10
11use ra_parser::ParseError; 9use ra_parser::ParseError;
12use rowan::GreenNodeBuilder; 10use rowan::{GreenNodeBuilder, Language};
13 11
14use crate::{ 12use crate::{
15 syntax_error::{SyntaxError, SyntaxErrorKind}, 13 syntax_error::{SyntaxError, SyntaxErrorKind},
16 AstNode, Parse, SmolStr, SourceFile, SyntaxKind, SyntaxNodePtr, SyntaxText, TextRange, 14 Parse, SmolStr, SyntaxKind, TextUnit,
17 TextUnit,
18}; 15};
19 16
20pub use rowan::WalkEvent;
21pub(crate) use rowan::{GreenNode, GreenToken}; 17pub(crate) use rowan::{GreenNode, GreenToken};
22 18
23#[derive(Debug, PartialEq, Eq, Clone, Copy)] 19#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
24pub enum InsertPosition<T> { 20pub enum RustLanguage {}
25 First, 21impl Language for RustLanguage {
26 Last, 22 type Kind = SyntaxKind;
27 Before(T),
28 After(T),
29}
30
31#[derive(PartialEq, Eq, Hash, Clone)]
32pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode);
33 23
34impl fmt::Debug for SyntaxNode { 24 fn kind_from_raw(raw: rowan::cursor::SyntaxKind) -> SyntaxKind {
35 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 25 SyntaxKind::from(raw.0)
36 if f.alternate() {
37 let mut level = 0;
38 for event in self.preorder_with_tokens() {
39 match event {
40 WalkEvent::Enter(element) => {
41 for _ in 0..level {
42 write!(f, " ")?;
43 }
44 match element {
45 SyntaxElement::Node(node) => writeln!(f, "{:?}", node)?,
46 SyntaxElement::Token(token) => writeln!(f, "{:?}", token)?,
47 }
48 level += 1;
49 }
50 WalkEvent::Leave(_) => level -= 1,
51 }
52 }
53 assert_eq!(level, 0);
54 Ok(())
55 } else {
56 write!(f, "{:?}@{:?}", self.kind(), self.text_range())
57 }
58 } 26 }
59}
60 27
61impl fmt::Display for SyntaxNode { 28 fn kind_to_raw(kind: SyntaxKind) -> rowan::cursor::SyntaxKind {
62 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { 29 rowan::cursor::SyntaxKind(kind.into())
63 fmt::Display::fmt(&self.text(), fmt)
64 } 30 }
65} 31}
66 32
67#[derive(Debug, Clone, Copy, PartialEq, Eq)] 33pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
68pub enum Direction { 34pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
69 Next, 35pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
70 Prev, 36pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
71} 37pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
72
73impl SyntaxNode {
74 pub(crate) fn new(green: GreenNode) -> SyntaxNode {
75 let inner = rowan::cursor::SyntaxNode::new_root(green);
76 SyntaxNode(inner)
77 }
78
79 pub fn kind(&self) -> SyntaxKind {
80 self.0.kind().0.into()
81 }
82
83 pub fn text_range(&self) -> TextRange {
84 self.0.text_range()
85 }
86
87 pub fn text(&self) -> SyntaxText {
88 SyntaxText::new(self.clone())
89 }
90
91 pub fn parent(&self) -> Option<SyntaxNode> {
92 self.0.parent().map(SyntaxNode)
93 }
94
95 pub fn first_child(&self) -> Option<SyntaxNode> {
96 self.0.first_child().map(SyntaxNode)
97 }
98
99 pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
100 self.0.first_child_or_token().map(SyntaxElement::new)
101 }
102
103 pub fn last_child(&self) -> Option<SyntaxNode> {
104 self.0.last_child().map(SyntaxNode)
105 }
106
107 pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
108 self.0.last_child_or_token().map(SyntaxElement::new)
109 }
110
111 pub fn next_sibling(&self) -> Option<SyntaxNode> {
112 self.0.next_sibling().map(SyntaxNode)
113 }
114
115 pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
116 self.0.next_sibling_or_token().map(SyntaxElement::new)
117 }
118
119 pub fn prev_sibling(&self) -> Option<SyntaxNode> {
120 self.0.prev_sibling().map(SyntaxNode)
121 }
122
123 pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
124 self.0.prev_sibling_or_token().map(SyntaxElement::new)
125 }
126
127 pub fn children(&self) -> SyntaxNodeChildren {
128 SyntaxNodeChildren(self.0.children())
129 }
130
131 pub fn children_with_tokens(&self) -> SyntaxElementChildren {
132 SyntaxElementChildren(self.0.children_with_tokens())
133 }
134
135 pub fn first_token(&self) -> Option<SyntaxToken> {
136 self.0.first_token().map(SyntaxToken)
137 }
138
139 pub fn last_token(&self) -> Option<SyntaxToken> {
140 self.0.last_token().map(SyntaxToken)
141 }
142 38
143 pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> { 39pub use rowan::{Direction, NodeOrToken};
144 successors(Some(self.clone()), |node| node.parent())
145 }
146
147 pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
148 self.preorder().filter_map(|event| match event {
149 WalkEvent::Enter(node) => Some(node),
150 WalkEvent::Leave(_) => None,
151 })
152 }
153
154 pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement> {
155 self.preorder_with_tokens().filter_map(|event| match event {
156 WalkEvent::Enter(it) => Some(it),
157 WalkEvent::Leave(_) => None,
158 })
159 }
160
161 pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> {
162 successors(Some(self.clone()), move |node| match direction {
163 Direction::Next => node.next_sibling(),
164 Direction::Prev => node.prev_sibling(),
165 })
166 }
167
168 pub fn siblings_with_tokens(
169 &self,
170 direction: Direction,
171 ) -> impl Iterator<Item = SyntaxElement> {
172 let me: SyntaxElement = self.clone().into();
173 successors(Some(me), move |el| match direction {
174 Direction::Next => el.next_sibling_or_token(),
175 Direction::Prev => el.prev_sibling_or_token(),
176 })
177 }
178
179 pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<SyntaxNode>> {
180 self.0.preorder().map(|event| match event {
181 WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)),
182 WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)),
183 })
184 }
185
186 pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> {
187 self.0.preorder_with_tokens().map(|event| match event {
188 WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)),
189 WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)),
190 })
191 }
192
193 pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
194 self.0.replace_with(replacement)
195 }
196
197 /// Adds specified children (tokens or nodes) to the current node at the
198 /// specific position.
199 ///
200 /// This is a type-unsafe low-level editing API, if you need to use it,
201 /// prefer to create a type-safe abstraction on top of it instead.
202 pub fn insert_children(
203 &self,
204 position: InsertPosition<SyntaxElement>,
205 to_insert: impl Iterator<Item = SyntaxElement>,
206 ) -> SyntaxNode {
207 let mut delta = TextUnit::default();
208 let to_insert = to_insert.map(|element| {
209 delta += element.text_len();
210 to_green_element(element)
211 });
212
213 let old_children = self.0.green().children();
214
215 let new_children = match &position {
216 InsertPosition::First => {
217 to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
218 }
219 InsertPosition::Last => {
220 old_children.iter().cloned().chain(to_insert).collect::<Box<[_]>>()
221 }
222 InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
223 let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
224 let split_at = self.position_of_child(anchor.clone()) + take_anchor;
225 let (before, after) = old_children.split_at(split_at);
226 before
227 .iter()
228 .cloned()
229 .chain(to_insert)
230 .chain(after.iter().cloned())
231 .collect::<Box<[_]>>()
232 }
233 };
234
235 self.with_children(new_children)
236 }
237
238 /// Replaces all nodes in `to_delete` with nodes from `to_insert`
239 ///
240 /// This is a type-unsafe low-level editing API, if you need to use it,
241 /// prefer to create a type-safe abstraction on top of it instead.
242 pub fn replace_children(
243 &self,
244 to_delete: RangeInclusive<SyntaxElement>,
245 to_insert: impl Iterator<Item = SyntaxElement>,
246 ) -> SyntaxNode {
247 let start = self.position_of_child(to_delete.start().clone());
248 let end = self.position_of_child(to_delete.end().clone());
249 let old_children = self.0.green().children();
250
251 let new_children = old_children[..start]
252 .iter()
253 .cloned()
254 .chain(to_insert.map(to_green_element))
255 .chain(old_children[end + 1..].iter().cloned())
256 .collect::<Box<[_]>>();
257 self.with_children(new_children)
258 }
259
260 fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode {
261 let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
262 let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
263 let new_file_node = self.replace_with(new_node);
264 let file = SourceFile::new(new_file_node);
265
266 // FIXME: use a more elegant way to re-fetch the node (#1185), make
267 // `range` private afterwards
268 let mut ptr = SyntaxNodePtr::new(self);
269 ptr.range = TextRange::offset_len(ptr.range().start(), len);
270 ptr.to_node(file.syntax()).to_owned()
271 }
272
273 fn position_of_child(&self, child: SyntaxElement) -> usize {
274 self.children_with_tokens()
275 .position(|it| it == child)
276 .expect("element is not a child of current element")
277 }
278}
279
280fn to_green_element(element: SyntaxElement) -> rowan::GreenElement {
281 match element {
282 SyntaxElement::Node(node) => node.0.green().clone().into(),
283 SyntaxElement::Token(tok) => {
284 GreenToken::new(rowan::SyntaxKind(tok.kind() as u16), tok.text().clone()).into()
285 }
286 }
287}
288
289#[derive(Clone, PartialEq, Eq, Hash)]
290pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken);
291
292impl fmt::Debug for SyntaxToken {
293 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
294 write!(fmt, "{:?}@{:?}", self.kind(), self.text_range())?;
295 if self.text().len() < 25 {
296 return write!(fmt, " {:?}", self.text());
297 }
298 let text = self.text().as_str();
299 for idx in 21..25 {
300 if text.is_char_boundary(idx) {
301 let text = format!("{} ...", &text[..idx]);
302 return write!(fmt, " {:?}", text);
303 }
304 }
305 unreachable!()
306 }
307}
308
309impl fmt::Display for SyntaxToken {
310 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
311 fmt::Display::fmt(self.text(), fmt)
312 }
313}
314
315impl SyntaxToken {
316 pub fn kind(&self) -> SyntaxKind {
317 self.0.kind().0.into()
318 }
319
320 pub fn text(&self) -> &SmolStr {
321 self.0.text()
322 }
323
324 pub fn text_range(&self) -> TextRange {
325 self.0.text_range()
326 }
327
328 pub fn parent(&self) -> SyntaxNode {
329 SyntaxNode(self.0.parent())
330 }
331
332 pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
333 self.0.next_sibling_or_token().map(SyntaxElement::new)
334 }
335
336 pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
337 self.0.prev_sibling_or_token().map(SyntaxElement::new)
338 }
339
340 pub fn siblings_with_tokens(
341 &self,
342 direction: Direction,
343 ) -> impl Iterator<Item = SyntaxElement> {
344 let me: SyntaxElement = self.clone().into();
345 successors(Some(me), move |el| match direction {
346 Direction::Next => el.next_sibling_or_token(),
347 Direction::Prev => el.prev_sibling_or_token(),
348 })
349 }
350
351 pub fn next_token(&self) -> Option<SyntaxToken> {
352 self.0.next_token().map(SyntaxToken)
353 }
354
355 pub fn prev_token(&self) -> Option<SyntaxToken> {
356 self.0.prev_token().map(SyntaxToken)
357 }
358
359 pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode {
360 self.0.replace_with(new_token)
361 }
362}
363
364#[derive(Debug, PartialEq, Eq, Hash, Clone)]
365pub enum SyntaxElement {
366 Node(SyntaxNode),
367 Token(SyntaxToken),
368}
369
370impl From<SyntaxNode> for SyntaxElement {
371 fn from(node: SyntaxNode) -> Self {
372 SyntaxElement::Node(node)
373 }
374}
375
376impl From<SyntaxToken> for SyntaxElement {
377 fn from(token: SyntaxToken) -> Self {
378 SyntaxElement::Token(token)
379 }
380}
381
382impl fmt::Display for SyntaxElement {
383 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
384 match self {
385 SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt),
386 SyntaxElement::Token(it) => fmt::Display::fmt(it, fmt),
387 }
388 }
389}
390
391impl SyntaxElement {
392 pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self {
393 match el {
394 rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)),
395 rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)),
396 }
397 }
398
399 pub fn kind(&self) -> SyntaxKind {
400 match self {
401 SyntaxElement::Node(it) => it.kind(),
402 SyntaxElement::Token(it) => it.kind(),
403 }
404 }
405
406 pub fn as_node(&self) -> Option<&SyntaxNode> {
407 match self {
408 SyntaxElement::Node(node) => Some(node),
409 SyntaxElement::Token(_) => None,
410 }
411 }
412
413 pub fn into_node(self) -> Option<SyntaxNode> {
414 match self {
415 SyntaxElement::Node(node) => Some(node),
416 SyntaxElement::Token(_) => None,
417 }
418 }
419
420 pub fn as_token(&self) -> Option<&SyntaxToken> {
421 match self {
422 SyntaxElement::Node(_) => None,
423 SyntaxElement::Token(token) => Some(token),
424 }
425 }
426
427 pub fn into_token(self) -> Option<SyntaxToken> {
428 match self {
429 SyntaxElement::Node(_) => None,
430 SyntaxElement::Token(token) => Some(token),
431 }
432 }
433
434 pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
435 match self {
436 SyntaxElement::Node(it) => it.next_sibling_or_token(),
437 SyntaxElement::Token(it) => it.next_sibling_or_token(),
438 }
439 }
440
441 pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
442 match self {
443 SyntaxElement::Node(it) => it.prev_sibling_or_token(),
444 SyntaxElement::Token(it) => it.prev_sibling_or_token(),
445 }
446 }
447
448 pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
449 match self {
450 SyntaxElement::Node(it) => it.clone(),
451 SyntaxElement::Token(it) => it.parent(),
452 }
453 .ancestors()
454 }
455
456 pub fn text_range(&self) -> TextRange {
457 match self {
458 SyntaxElement::Node(it) => it.text_range(),
459 SyntaxElement::Token(it) => it.text_range(),
460 }
461 }
462
463 fn text_len(&self) -> TextUnit {
464 match self {
465 SyntaxElement::Node(node) => node.0.green().text_len(),
466 SyntaxElement::Token(token) => TextUnit::of_str(token.0.text()),
467 }
468 }
469}
470
471#[derive(Clone, Debug)]
472pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren);
473
474impl Iterator for SyntaxNodeChildren {
475 type Item = SyntaxNode;
476 fn next(&mut self) -> Option<SyntaxNode> {
477 self.0.next().map(SyntaxNode)
478 }
479}
480
481#[derive(Clone, Debug)]
482pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren);
483
484impl Iterator for SyntaxElementChildren {
485 type Item = SyntaxElement;
486 fn next(&mut self) -> Option<SyntaxElement> {
487 self.0.next().map(SyntaxElement::new)
488 }
489}
490 40
491pub struct SyntaxTreeBuilder { 41pub struct SyntaxTreeBuilder {
492 errors: Vec<SyntaxError>, 42 errors: Vec<SyntaxError>,
@@ -507,19 +57,21 @@ impl SyntaxTreeBuilder {
507 57
508 pub fn finish(self) -> Parse<SyntaxNode> { 58 pub fn finish(self) -> Parse<SyntaxNode> {
509 let (green, errors) = self.finish_raw(); 59 let (green, errors) = self.finish_raw();
510 let node = SyntaxNode::new(green); 60 let node = SyntaxNode::new_root(green);
511 if cfg!(debug_assertions) { 61 if cfg!(debug_assertions) {
512 crate::validation::validate_block_structure(&node); 62 crate::validation::validate_block_structure(&node);
513 } 63 }
514 Parse::new(node.0.green().clone(), errors) 64 Parse::new(node.green().clone(), errors)
515 } 65 }
516 66
517 pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) { 67 pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
518 self.inner.token(rowan::SyntaxKind(kind.into()), text) 68 let kind = RustLanguage::kind_to_raw(kind);
69 self.inner.token(kind, text)
519 } 70 }
520 71
521 pub fn start_node(&mut self, kind: SyntaxKind) { 72 pub fn start_node(&mut self, kind: SyntaxKind) {
522 self.inner.start_node(rowan::SyntaxKind(kind.into())) 73 let kind = RustLanguage::kind_to_raw(kind);
74 self.inner.start_node(kind)
523 } 75 }
524 76
525 pub fn finish_node(&mut self) { 77 pub fn finish_node(&mut self) {
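
With SyntaxNode and friends reduced to aliases over rowan's language-parameterized types, the builder now routes every SyntaxKind through RustLanguage::kind_to_raw. A sketch of driving the builder by hand, assuming the pre-existing Default impl on SyntaxTreeBuilder:

    use ra_syntax::{SyntaxKind, SyntaxNode, SyntaxTreeBuilder};

    // Hand-build a tree containing a single whitespace token; every kind passed
    // in here is converted by RustLanguage::kind_to_raw inside the builder.
    fn whitespace_only_file() -> SyntaxNode {
        let mut builder = SyntaxTreeBuilder::default(); // Default impl assumed
        builder.start_node(SyntaxKind::SOURCE_FILE);
        builder.token(SyntaxKind::WHITESPACE, " ".into());
        builder.finish_node();
        builder.finish().syntax_node()
    }
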
diff --git a/crates/ra_syntax/src/syntax_text.rs b/crates/ra_syntax/src/syntax_text.rs
deleted file mode 100644
index 652cb7a1e..000000000
--- a/crates/ra_syntax/src/syntax_text.rs
+++ /dev/null
@@ -1,178 +0,0 @@
1use std::{
2 fmt,
3 ops::{self, Bound},
4};
5
6use crate::{SmolStr, SyntaxElement, SyntaxNode, TextRange, TextUnit};
7
8#[derive(Clone)]
9pub struct SyntaxText {
10 node: SyntaxNode,
11 range: TextRange,
12}
13
14impl SyntaxText {
15 pub(crate) fn new(node: SyntaxNode) -> SyntaxText {
16 let range = node.text_range();
17 SyntaxText { node, range }
18 }
19
20 pub fn try_fold_chunks<T, F, E>(&self, init: T, mut f: F) -> Result<T, E>
21 where
22 F: FnMut(T, &str) -> Result<T, E>,
23 {
24 self.node.descendants_with_tokens().try_fold(init, move |acc, element| {
25 let res = match element {
26 SyntaxElement::Token(token) => {
27 let range = match self.range.intersection(&token.text_range()) {
28 None => return Ok(acc),
29 Some(it) => it,
30 };
31 let slice = if range == token.text_range() {
32 token.text()
33 } else {
34 let range = range - token.text_range().start();
35 &token.text()[range]
36 };
37 f(acc, slice)?
38 }
39 SyntaxElement::Node(_) => acc,
40 };
41 Ok(res)
42 })
43 }
44
45 pub fn try_for_each_chunk<F: FnMut(&str) -> Result<(), E>, E>(
46 &self,
47 mut f: F,
48 ) -> Result<(), E> {
49 self.try_fold_chunks((), move |(), chunk| f(chunk))
50 }
51
52 pub fn for_each_chunk<F: FnMut(&str)>(&self, mut f: F) {
53 enum Void {}
54 match self.try_for_each_chunk(|chunk| Ok::<(), Void>(f(chunk))) {
55 Ok(()) => (),
56 Err(void) => match void {},
57 }
58 }
59
60 pub fn to_smol_string(&self) -> SmolStr {
61 self.to_string().into()
62 }
63
64 pub fn contains_char(&self, c: char) -> bool {
65 self.try_for_each_chunk(|chunk| if chunk.contains(c) { Err(()) } else { Ok(()) }).is_err()
66 }
67
68 pub fn find_char(&self, c: char) -> Option<TextUnit> {
69 let mut acc: TextUnit = 0.into();
70 let res = self.try_for_each_chunk(|chunk| {
71 if let Some(pos) = chunk.find(c) {
72 let pos: TextUnit = (pos as u32).into();
73 return Err(acc + pos);
74 }
75 acc += TextUnit::of_str(chunk);
76 Ok(())
77 });
78 found(res)
79 }
80
81 pub fn len(&self) -> TextUnit {
82 self.range.len()
83 }
84
85 pub fn is_empty(&self) -> bool {
86 self.range.is_empty()
87 }
88
89 pub fn slice(&self, range: impl ops::RangeBounds<TextUnit>) -> SyntaxText {
90 let start = match range.start_bound() {
91 Bound::Included(&b) => b,
92 Bound::Excluded(_) => panic!("utf-aware slicing can't work this way"),
93 Bound::Unbounded => 0.into(),
94 };
95 let end = match range.end_bound() {
96 Bound::Included(_) => panic!("utf-aware slicing can't work this way"),
97 Bound::Excluded(&b) => b,
98 Bound::Unbounded => self.len(),
99 };
100 assert!(start <= end);
101 let len = end - start;
102 let start = self.range.start() + start;
103 let end = start + len;
104 assert!(
105 start <= end,
106 "invalid slice, range: {:?}, slice: {:?}",
107 self.range,
108 (range.start_bound(), range.end_bound()),
109 );
110 let range = TextRange::from_to(start, end);
111 assert!(
112 range.is_subrange(&self.range),
113 "invalid slice, range: {:?}, slice: {:?}",
114 self.range,
115 range,
116 );
117 SyntaxText { node: self.node.clone(), range }
118 }
119
120 pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> {
121 let offset = offset.into();
122 let mut start: TextUnit = 0.into();
123 let res = self.try_for_each_chunk(|chunk| {
124 let end = start + TextUnit::of_str(chunk);
125 if start <= offset && offset < end {
126 let off: usize = u32::from(offset - start) as usize;
127 return Err(chunk[off..].chars().next().unwrap());
128 }
129 start = end;
130 Ok(())
131 });
132 found(res)
133 }
134}
135
136fn found<T>(res: Result<(), T>) -> Option<T> {
137 match res {
138 Ok(()) => None,
139 Err(it) => Some(it),
140 }
141}
142
143impl fmt::Debug for SyntaxText {
144 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
145 fmt::Debug::fmt(&self.to_string(), f)
146 }
147}
148
149impl fmt::Display for SyntaxText {
150 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
151 self.try_for_each_chunk(|chunk| fmt::Display::fmt(chunk, f))
152 }
153}
154
155impl From<SyntaxText> for String {
156 fn from(text: SyntaxText) -> String {
157 text.to_string()
158 }
159}
160
161impl PartialEq<str> for SyntaxText {
162 fn eq(&self, mut rhs: &str) -> bool {
163 self.try_for_each_chunk(|chunk| {
164 if !rhs.starts_with(chunk) {
165 return Err(());
166 }
167 rhs = &rhs[chunk.len()..];
168 Ok(())
169 })
170 .is_ok()
171 }
172}
173
174impl PartialEq<&'_ str> for SyntaxText {
175 fn eq(&self, rhs: &&str) -> bool {
176 self == *rhs
177 }
178}
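
syntax_text.rs is deleted because SyntaxText was upstreamed; the lib.rs hunk above now re-exports rowan::SyntaxText instead. The chunked, allocation-free access pattern should remain available through that re-export; a hedged sketch, assuming rowan's SyntaxText keeps the for_each_chunk method shown in the deleted file:

    use ra_syntax::{AstNode, SourceFile};

    // Scan the tree's text without assembling one big String. Chunks are
    // per-token, so a needle spanning two tokens would not be found this way.
    fn any_chunk_contains(file: &SourceFile, needle: &str) -> bool {
        let mut found = false;
        file.syntax().text().for_each_chunk(|chunk| {
            if chunk.contains(needle) {
                found = true;
            }
        });
        found
    }
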
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs
index 19bdafef2..e03c02d1b 100644
--- a/crates/ra_syntax/src/validation.rs
+++ b/crates/ra_syntax/src/validation.rs
@@ -5,16 +5,16 @@ mod field_expr;
5 5
6use crate::{ 6use crate::{
7 algo::visit::{visitor_ctx, VisitorCtx}, 7 algo::visit::{visitor_ctx, VisitorCtx},
8 ast, AstNode, SourceFile, SyntaxError, 8 ast, SyntaxError,
9 SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING}, 9 SyntaxKind::{BYTE, BYTE_STRING, CHAR, STRING},
10 SyntaxNode, TextUnit, T, 10 SyntaxNode, TextUnit, T,
11}; 11};
12 12
13pub(crate) use unescape::EscapeError; 13pub(crate) use unescape::EscapeError;
14 14
15pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> { 15pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
16 let mut errors = Vec::new(); 16 let mut errors = Vec::new();
17 for node in file.syntax().descendants() { 17 for node in root.descendants() {
18 let _ = visitor_ctx(&mut errors) 18 let _ = visitor_ctx(&mut errors)
19 .visit::<ast::Literal, _>(validate_literal) 19 .visit::<ast::Literal, _>(validate_literal)
20 .visit::<ast::Block, _>(block::validate_block_node) 20 .visit::<ast::Block, _>(block::validate_block_node)