Diffstat (limited to 'crates')
-rw-r--r--  crates/gen_lsp_server/Cargo.toml  |    2
-rw-r--r--  crates/ra_assists/src/flip_comma.rs  |    3
-rw-r--r--  crates/ra_assists/src/lib.rs  |    6
-rw-r--r--  crates/ra_cli/src/main.rs  |    6
-rw-r--r--  crates/ra_hir/src/generics.rs  |   13
-rw-r--r--  crates/ra_hir/src/ty.rs  |   86
-rw-r--r--  crates/ra_hir/src/ty/snapshots/tests__infer_struct.snap  |    6
-rw-r--r--  crates/ra_hir/src/ty/snapshots/tests__infer_tuple_struct_generics.snap  |   23
-rw-r--r--  crates/ra_hir/src/ty/tests.rs  |   21
-rw-r--r--  crates/ra_ide_api/src/hover.rs  |   17
-rw-r--r--  crates/ra_ide_api/src/lib.rs  |    5
-rw-r--r--  crates/ra_ide_api_light/src/join_lines.rs  |  158
-rw-r--r--  crates/ra_ide_api_light/src/lib.rs  |    4
-rw-r--r--  crates/ra_lsp_server/Cargo.toml  |    2
-rw-r--r--  crates/ra_lsp_server/src/main_loop/handlers.rs  |    2
-rw-r--r--  crates/ra_parser/Cargo.toml  |    9
-rw-r--r--  crates/ra_parser/src/event.rs  |  127
-rw-r--r--  crates/ra_parser/src/grammar.rs (renamed from crates/ra_syntax/src/parsing/grammar.rs)  |   34
-rw-r--r--  crates/ra_parser/src/grammar/attributes.rs (renamed from crates/ra_syntax/src/parsing/grammar/attributes.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/expressions.rs (renamed from crates/ra_syntax/src/parsing/grammar/expressions.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/expressions/atom.rs (renamed from crates/ra_syntax/src/parsing/grammar/expressions/atom.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/items.rs (renamed from crates/ra_syntax/src/parsing/grammar/items.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/items/consts.rs (renamed from crates/ra_syntax/src/parsing/grammar/items/consts.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/items/nominal.rs (renamed from crates/ra_syntax/src/parsing/grammar/items/nominal.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/items/traits.rs (renamed from crates/ra_syntax/src/parsing/grammar/items/traits.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/items/use_item.rs (renamed from crates/ra_syntax/src/parsing/grammar/items/use_item.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/params.rs (renamed from crates/ra_syntax/src/parsing/grammar/params.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/paths.rs (renamed from crates/ra_syntax/src/parsing/grammar/paths.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/patterns.rs (renamed from crates/ra_syntax/src/parsing/grammar/patterns.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/type_args.rs (renamed from crates/ra_syntax/src/parsing/grammar/type_args.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/type_params.rs (renamed from crates/ra_syntax/src/parsing/grammar/type_params.rs)  |    0
-rw-r--r--  crates/ra_parser/src/grammar/types.rs (renamed from crates/ra_syntax/src/parsing/grammar/types.rs)  |    0
-rw-r--r--  crates/ra_parser/src/lib.rs  |   88
-rw-r--r--  crates/ra_parser/src/parser.rs (renamed from crates/ra_syntax/src/parsing/parser.rs)  |    7
-rw-r--r--  crates/ra_parser/src/syntax_kind.rs (renamed from crates/ra_syntax/src/syntax_kinds.rs)  |    4
-rw-r--r--  crates/ra_parser/src/syntax_kind/generated.rs (renamed from crates/ra_syntax/src/syntax_kinds/generated.rs)  |    4
-rw-r--r--  crates/ra_parser/src/syntax_kind/generated.rs.tera (renamed from crates/ra_syntax/src/syntax_kinds/generated.rs.tera)  |    4
-rw-r--r--  crates/ra_parser/src/token_set.rs (renamed from crates/ra_syntax/src/parsing/token_set.rs)  |    3
-rw-r--r--  crates/ra_syntax/Cargo.toml  |    1
-rw-r--r--  crates/ra_syntax/fuzz/.gitignore  |    2
-rw-r--r--  crates/ra_syntax/fuzz/Cargo.lock  |  520
-rw-r--r--  crates/ra_syntax/fuzz/fuzz_targets/parser.rs  |    2
-rw-r--r--  crates/ra_syntax/src/algo.rs  |    8
-rw-r--r--  crates/ra_syntax/src/ast.rs  |    1
-rw-r--r--  crates/ra_syntax/src/lib.rs  |  214
-rw-r--r--  crates/ra_syntax/src/parsing.rs  |   72
-rw-r--r--  crates/ra_syntax/src/parsing/builder.rs  |  165
-rw-r--r--  crates/ra_syntax/src/parsing/event.rs  |  247
-rw-r--r--  crates/ra_syntax/src/parsing/input.rs  |   47
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs  |   43
-rw-r--r--  crates/ra_syntax/src/syntax_error.rs  |    4
-rw-r--r--  crates/ra_syntax/src/syntax_node.rs  |  184
-rw-r--r--  crates/ra_syntax/src/utils.rs  |   83
-rw-r--r--  crates/ra_syntax/src/validation.rs  |   41
-rw-r--r--  crates/ra_syntax/tests/test.rs  |    9
-rw-r--r--  crates/tools/src/lib.rs  |    4
56 files changed, 1115 insertions, 1166 deletions
diff --git a/crates/gen_lsp_server/Cargo.toml b/crates/gen_lsp_server/Cargo.toml
index a252c911c..9e0d819d0 100644
--- a/crates/gen_lsp_server/Cargo.toml
+++ b/crates/gen_lsp_server/Cargo.toml
@@ -8,7 +8,7 @@ license = "MIT OR Apache-2.0"
8description = "Generic LSP server scaffold." 8description = "Generic LSP server scaffold."
9 9
10[dependencies] 10[dependencies]
11lsp-types = "0.55.0" 11lsp-types = "0.56.0"
12log = "0.4.3" 12log = "0.4.3"
13failure = "0.1.4" 13failure = "0.1.4"
14serde_json = "1.0.34" 14serde_json = "1.0.34"
diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs
index 08644d720..0d4a789fc 100644
--- a/crates/ra_assists/src/flip_comma.rs
+++ b/crates/ra_assists/src/flip_comma.rs
@@ -2,9 +2,10 @@ use hir::db::HirDatabase;
2use ra_syntax::{ 2use ra_syntax::{
3 Direction, 3 Direction,
4 SyntaxKind::COMMA, 4 SyntaxKind::COMMA,
5 algo::non_trivia_sibling,
5}; 6};
6 7
7use crate::{AssistCtx, Assist, non_trivia_sibling}; 8use crate::{AssistCtx, Assist};
8 9
9pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { 10pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
10 let comma = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COMMA)?; 11 let comma = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COMMA)?;
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index 7bd9b5ae6..e1e899edc 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -10,7 +10,7 @@ mod assist_ctx;
10use itertools::Itertools; 10use itertools::Itertools;
11 11
12use ra_text_edit::TextEdit; 12use ra_text_edit::TextEdit;
13use ra_syntax::{TextRange, TextUnit, SyntaxNode, Direction}; 13use ra_syntax::{TextRange, TextUnit};
14use ra_db::FileRange; 14use ra_db::FileRange;
15use hir::db::HirDatabase; 15use hir::db::HirDatabase;
16 16
@@ -104,10 +104,6 @@ fn all_assists<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assis
104 ] 104 ]
105} 105}
106 106
107fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
108 node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia())
109}
110
111#[cfg(test)] 107#[cfg(test)]
112mod helpers { 108mod helpers {
113 use hir::mock::MockDatabase; 109 use hir::mock::MockDatabase;
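For reference, the helper deleted above is the same `non_trivia_sibling` that `flip_comma.rs` now imports from `ra_syntax::algo`; an annotated copy of the removed body, assuming the `SyntaxNode`/`Direction` API used throughout this diff:

fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
    // skip(1) steps over `node` itself, then the first non-trivia sibling
    // (i.e. not whitespace or a comment) in the given direction is returned
    node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia())
}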
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 72e6ae4d5..294f4b8af 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -4,7 +4,7 @@ use std::{fs, io::Read, path::Path, time::Instant};
4 4
5use clap::{App, Arg, SubCommand}; 5use clap::{App, Arg, SubCommand};
6use join_to_string::join; 6use join_to_string::join;
7use ra_ide_api_light::{extend_selection, file_structure, syntax_tree}; 7use ra_ide_api_light::{extend_selection, file_structure};
8use ra_syntax::{SourceFile, TextRange, TreeArc, AstNode}; 8use ra_syntax::{SourceFile, TextRange, TreeArc, AstNode};
9use tools::collect_tests; 9use tools::collect_tests;
10use flexi_logger::Logger; 10use flexi_logger::Logger;
@@ -37,7 +37,7 @@ fn main() -> Result<()> {
37 let file = file()?; 37 let file = file()?;
38 let elapsed = start.elapsed(); 38 let elapsed = start.elapsed();
39 if !matches.is_present("no-dump") { 39 if !matches.is_present("no-dump") {
40 println!("{}", syntax_tree(&file)); 40 println!("{}", file.syntax().debug_dump());
41 } 41 }
42 eprintln!("parsing: {:?}", elapsed); 42 eprintln!("parsing: {:?}", elapsed);
43 ::std::mem::forget(file); 43 ::std::mem::forget(file);
@@ -94,7 +94,7 @@ fn render_test(file: &Path, line: usize) -> Result<(String, String)> {
94 Some((_start_line, test)) => test, 94 Some((_start_line, test)) => test,
95 }; 95 };
96 let file = SourceFile::parse(&test.text); 96 let file = SourceFile::parse(&test.text);
97 let tree = syntax_tree(&file); 97 let tree = file.syntax().debug_dump();
98 Ok((test.text, tree)) 98 Ok((test.text, tree))
99} 99}
100 100
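A minimal before/after sketch for other callers of the removed `ra_ide_api_light::syntax_tree` helper, using only the `SourceFile`/`AstNode` API already imported in this file:

use ra_syntax::{AstNode, SourceFile};

fn dump_syntax(text: &str) -> String {
    let file = SourceFile::parse(text);
    // previously: ra_ide_api_light::syntax_tree(&file)
    file.syntax().debug_dump()
}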
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs
index fcc513353..c494beeb0 100644
--- a/crates/ra_hir/src/generics.rs
+++ b/crates/ra_hir/src/generics.rs
@@ -87,4 +87,17 @@ impl GenericParams {
87 let parent_count = self.count_parent_params(); 87 let parent_count = self.count_parent_params();
88 parent_count + self.params.len() 88 parent_count + self.params.len()
89 } 89 }
90
91 fn for_each_param<'a>(&'a self, f: &mut impl FnMut(&'a GenericParam)) {
92 if let Some(parent) = &self.parent_params {
93 parent.for_each_param(f);
94 }
95 self.params.iter().for_each(f);
96 }
97
98 pub fn params_including_parent(&self) -> Vec<&GenericParam> {
99 let mut vec = Vec::with_capacity(self.count_params_including_parent());
100 self.for_each_param(&mut |p| vec.push(p));
101 vec
102 }
90} 103}
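To make the traversal order of the new `params_including_parent` explicit (parameters of the enclosing item first, then the item's own, so a parameter's position matches `count_parent_params()` plus its index), here is a self-contained sketch with simplified stand-ins for the `ra_hir` types; it is illustration only, not the real API:

use std::sync::Arc;

struct GenericParam {
    idx: u32,
    name: String,
}

struct GenericParams {
    parent_params: Option<Arc<GenericParams>>,
    params: Vec<GenericParam>,
}

impl GenericParams {
    fn params_including_parent(&self) -> Vec<&GenericParam> {
        let mut all = Vec::new();
        if let Some(parent) = &self.parent_params {
            // the parent's parameters (e.g. those of an impl block) come first,
            // so positions in the result line up with each parameter's `idx`
            all.extend(parent.params_including_parent());
        }
        all.extend(self.params.iter());
        all
    }
}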
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index 89e854dd7..ae595c16d 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -40,7 +40,7 @@ use crate::{
40 name::{KnownName}, 40 name::{KnownName},
41 expr::{Body, Expr, BindingAnnotation, Literal, ExprId, Pat, PatId, UnaryOp, BinaryOp, Statement, FieldPat, self}, 41 expr::{Body, Expr, BindingAnnotation, Literal, ExprId, Pat, PatId, UnaryOp, BinaryOp, Statement, FieldPat, self},
42 generics::GenericParams, 42 generics::GenericParams,
43 path::GenericArg, 43 path::{ GenericArgs, GenericArg},
44 adt::VariantDef, 44 adt::VariantDef,
45 resolve::{Resolver, Resolution}, nameres::Namespace 45 resolve::{Resolver, Resolution}, nameres::Namespace
46}; 46};
@@ -165,17 +165,6 @@ impl Substs {
165 pub fn empty() -> Substs { 165 pub fn empty() -> Substs {
166 Substs(Arc::new([])) 166 Substs(Arc::new([]))
167 } 167 }
168
169 /// Replaces the end of the substitutions by other ones.
170 pub(crate) fn replace_tail(self, replace_by: Vec<Ty>) -> Substs {
171 // again missing Arc::make_mut_slice...
172 let len = replace_by.len().min(self.0.len());
173 let parent_len = self.0.len() - len;
174 let mut result = Vec::with_capacity(parent_len + len);
175 result.extend(self.0.iter().take(parent_len).cloned());
176 result.extend(replace_by);
177 Substs(result.into())
178 }
179} 168}
180 169
181/// A type. This is based on the `TyKind` enum in rustc (librustc/ty/sty.rs). 170/// A type. This is based on the `TyKind` enum in rustc (librustc/ty/sty.rs).
@@ -454,7 +443,7 @@ impl Ty {
454 for _ in supplied_params..def_generics.count_params_including_parent() { 443 for _ in supplied_params..def_generics.count_params_including_parent() {
455 substs.push(Ty::Unknown); 444 substs.push(Ty::Unknown);
456 } 445 }
457 assert_eq!(substs.len(), def_generics.params.len()); 446 assert_eq!(substs.len(), def_generics.count_params_including_parent());
458 Substs(substs.into()) 447 Substs(substs.into())
459 } 448 }
460 449
@@ -639,8 +628,11 @@ impl fmt::Display for Ty {
639 join(sig.input.iter()).surround_with("fn(", ")").separator(", ").to_fmt(f)?; 628 join(sig.input.iter()).surround_with("fn(", ")").separator(", ").to_fmt(f)?;
640 write!(f, " -> {}", sig.output) 629 write!(f, " -> {}", sig.output)
641 } 630 }
642 Ty::FnDef { name, substs, sig, .. } => { 631 Ty::FnDef { def, name, substs, sig, .. } => {
643 write!(f, "fn {}", name)?; 632 match def {
633 CallableDef::Function(_) => write!(f, "fn {}", name)?,
634 CallableDef::Struct(_) | CallableDef::EnumVariant(_) => write!(f, "{}", name)?,
635 }
644 if substs.0.len() > 0 { 636 if substs.0.len() > 0 {
645 join(substs.0.iter()).surround_with("<", ">").separator(", ").to_fmt(f)?; 637 join(substs.0.iter()).surround_with("<", ">").separator(", ").to_fmt(f)?;
646 } 638 }
@@ -712,16 +704,18 @@ fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) ->
712 .iter() 704 .iter()
713 .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) 705 .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref))
714 .collect::<Vec<_>>(); 706 .collect::<Vec<_>>();
715 let output = type_for_enum(db, def.parent_enum(db));
716 let sig = Arc::new(FnSig { input, output });
717 let substs = make_substs(&generics); 707 let substs = make_substs(&generics);
708 let output = type_for_enum(db, def.parent_enum(db)).apply_substs(substs.clone());
709 let sig = Arc::new(FnSig { input, output });
718 Ty::FnDef { def: def.into(), sig, name, substs } 710 Ty::FnDef { def: def.into(), sig, name, substs }
719} 711}
720 712
721fn make_substs(generics: &GenericParams) -> Substs { 713fn make_substs(generics: &GenericParams) -> Substs {
722 Substs( 714 Substs(
723 (0..generics.count_params_including_parent()) 715 generics
724 .map(|_p| Ty::Unknown) 716 .params_including_parent()
717 .into_iter()
718 .map(|p| Ty::Param { idx: p.idx, name: p.name.clone() })
725 .collect::<Vec<_>>() 719 .collect::<Vec<_>>()
726 .into(), 720 .into(),
727 ) 721 )
@@ -736,7 +730,7 @@ fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty {
736 } 730 }
737} 731}
738 732
739pub(crate) fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Ty { 733fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Ty {
740 let generics = s.generic_params(db); 734 let generics = s.generic_params(db);
741 Ty::Adt { 735 Ty::Adt {
742 def_id: s.into(), 736 def_id: s.into(),
@@ -1393,6 +1387,37 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1393 ty 1387 ty
1394 } 1388 }
1395 1389
1390 fn substs_for_method_call(
1391 &mut self,
1392 def_generics: Option<Arc<GenericParams>>,
1393 generic_args: &Option<GenericArgs>,
1394 ) -> Substs {
1395 let (parent_param_count, param_count) =
1396 def_generics.map_or((0, 0), |g| (g.count_parent_params(), g.params.len()));
1397 let mut substs = Vec::with_capacity(parent_param_count + param_count);
1398 for _ in 0..parent_param_count {
1399 substs.push(Ty::Unknown);
1400 }
1401 // handle provided type arguments
1402 if let Some(generic_args) = generic_args {
1403 // if args are provided, it should be all of them, but we can't rely on that
1404 for arg in generic_args.args.iter().take(param_count) {
1405 match arg {
1406 GenericArg::Type(type_ref) => {
1407 let ty = self.make_ty(type_ref);
1408 substs.push(ty);
1409 }
1410 }
1411 }
1412 };
1413 let supplied_params = substs.len();
1414 for _ in supplied_params..parent_param_count + param_count {
1415 substs.push(Ty::Unknown);
1416 }
1417 assert_eq!(substs.len(), parent_param_count + param_count);
1418 Substs(substs.into())
1419 }
1420
1396 fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { 1421 fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
1397 let body = Arc::clone(&self.body); // avoid borrow checker problem 1422 let body = Arc::clone(&self.body); // avoid borrow checker problem
1398 let ty = match &body[tgt_expr] { 1423 let ty = match &body[tgt_expr] {
@@ -1483,25 +1508,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
1483 } 1508 }
1484 None => (Ty::Unknown, receiver_ty, None), 1509 None => (Ty::Unknown, receiver_ty, None),
1485 }; 1510 };
1486 // handle provided type arguments 1511 let substs = self.substs_for_method_call(def_generics, generic_args);
1487 let method_ty = if let Some(generic_args) = generic_args { 1512 let method_ty = method_ty.apply_substs(substs);
1488 // if args are provided, it should be all of them, but we can't rely on that
1489 let param_count = def_generics.map(|g| g.params.len()).unwrap_or(0);
1490 let mut new_substs = Vec::with_capacity(generic_args.args.len());
1491 for arg in generic_args.args.iter().take(param_count) {
1492 match arg {
1493 GenericArg::Type(type_ref) => {
1494 let ty = self.make_ty(type_ref);
1495 new_substs.push(ty);
1496 }
1497 }
1498 }
1499 let substs = method_ty.substs().unwrap_or_else(Substs::empty);
1500 let substs = substs.replace_tail(new_substs);
1501 method_ty.apply_substs(substs)
1502 } else {
1503 method_ty
1504 };
1505 let method_ty = self.insert_type_vars(method_ty); 1513 let method_ty = self.insert_type_vars(method_ty);
1506 let (expected_receiver_ty, param_tys, ret_ty) = match &method_ty { 1514 let (expected_receiver_ty, param_tys, ret_ty) = match &method_ty {
1507 Ty::FnPtr(sig) => { 1515 Ty::FnPtr(sig) => {
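The intent of the new `substs_for_method_call` (parent generics are always left for inference, explicitly written type arguments fill the method's own parameters, and anything missing falls back to `Unknown`) can be shown in isolation; `Ty` below is a toy stand-in, not the `ra_hir` type:

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Unknown,
    Named(&'static str),
}

fn substs_for_method_call(parent_params: usize, own_params: usize, supplied: &[Ty]) -> Vec<Ty> {
    let mut substs = Vec::with_capacity(parent_params + own_params);
    // parameters of the surrounding impl/trait can never be written at the call site
    substs.extend(std::iter::repeat(Ty::Unknown).take(parent_params));
    // explicit arguments fill the method's own parameters; extras are ignored
    substs.extend(supplied.iter().cloned().take(own_params));
    // whatever was not supplied is left for type inference
    while substs.len() < parent_params + own_params {
        substs.push(Ty::Unknown);
    }
    assert_eq!(substs.len(), parent_params + own_params);
    substs
}

fn main() {
    // a method with one type parameter on an impl with one type parameter,
    // called as `x.method::<i32>()`
    let substs = substs_for_method_call(1, 1, &[Ty::Named("i32")]);
    assert_eq!(substs, vec![Ty::Unknown, Ty::Named("i32")]);
}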
diff --git a/crates/ra_hir/src/ty/snapshots/tests__infer_struct.snap b/crates/ra_hir/src/ty/snapshots/tests__infer_struct.snap
index 294186b06..32f1fa108 100644
--- a/crates/ra_hir/src/ty/snapshots/tests__infer_struct.snap
+++ b/crates/ra_hir/src/ty/snapshots/tests__infer_struct.snap
@@ -1,19 +1,19 @@
1--- 1---
2created: "2019-02-17T16:16:58.863630956Z" 2created: "2019-02-20T21:31:12.910924715Z"
3creator: [email protected] 3creator: [email protected]
4source: crates/ra_hir/src/ty/tests.rs 4source: crates/ra_hir/src/ty/tests.rs
5expression: "&result" 5expression: "&result"
6--- 6---
7[72; 154) '{ ...a.c; }': () 7[72; 154) '{ ...a.c; }': ()
8[82; 83) 'c': C 8[82; 83) 'c': C
9[86; 87) 'C': fn C(usize) -> C 9[86; 87) 'C': C(usize) -> C
10[86; 90) 'C(1)': C 10[86; 90) 'C(1)': C
11[88; 89) '1': usize 11[88; 89) '1': usize
12[96; 97) 'B': B 12[96; 97) 'B': B
13[107; 108) 'a': A 13[107; 108) 'a': A
14[114; 133) 'A { b:...C(1) }': A 14[114; 133) 'A { b:...C(1) }': A
15[121; 122) 'B': B 15[121; 122) 'B': B
16[127; 128) 'C': fn C(usize) -> C 16[127; 128) 'C': C(usize) -> C
17[127; 131) 'C(1)': C 17[127; 131) 'C(1)': C
18[129; 130) '1': usize 18[129; 130) '1': usize
19[139; 140) 'a': A 19[139; 140) 'a': A
diff --git a/crates/ra_hir/src/ty/snapshots/tests__infer_tuple_struct_generics.snap b/crates/ra_hir/src/ty/snapshots/tests__infer_tuple_struct_generics.snap
new file mode 100644
index 000000000..783795cfd
--- /dev/null
+++ b/crates/ra_hir/src/ty/snapshots/tests__infer_tuple_struct_generics.snap
@@ -0,0 +1,23 @@
1---
2created: "2019-02-20T21:31:12.911275141Z"
3creator: [email protected]
4source: crates/ra_hir/src/ty/tests.rs
5expression: "&result"
6---
7[77; 185) '{ ...one; }': ()
8[83; 84) 'A': A<i32>(T) -> A<T>
9[83; 88) 'A(42)': A<i32>
10[85; 87) '42': i32
11[94; 95) 'A': A<u128>(T) -> A<T>
12[94; 103) 'A(42u128)': A<u128>
13[96; 102) '42u128': u128
14[109; 113) 'Some': Some<&str>(T) -> Option<T>
15[109; 118) 'Some("x")': Option<&str>
16[114; 117) '"x"': &str
17[124; 136) 'Option::Some': Some<&str>(T) -> Option<T>
18[124; 141) 'Option...e("x")': Option<&str>
19[137; 140) '"x"': &str
20[147; 151) 'None': Option<[unknown]>
21[161; 162) 'x': Option<i64>
22[178; 182) 'None': Option<i64>
23
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index fee6ed0b3..f04e9109c 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -466,6 +466,27 @@ fn test(a1: A<u32>, i: i32) {
466} 466}
467 467
468#[test] 468#[test]
469fn infer_tuple_struct_generics() {
470 check_inference(
471 "infer_tuple_struct_generics",
472 r#"
473struct A<T>(T);
474enum Option<T> { Some(T), None };
475use Option::*;
476
477fn test() {
478 A(42);
479 A(42u128);
480 Some("x");
481 Option::Some("x");
482 None;
483 let x: Option<i64> = None;
484}
485"#,
486 );
487}
488
489#[test]
469fn infer_generics_in_patterns() { 490fn infer_generics_in_patterns() {
470 check_inference( 491 check_inference(
471 "infer_generics_in_patterns", 492 "infer_generics_in_patterns",
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index 38671b394..c62683ad4 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -164,6 +164,23 @@ mod tests {
164 } 164 }
165 165
166 #[test] 166 #[test]
167 fn hover_some() {
168 let (analysis, position) = single_file_with_position(
169 "
170 enum Option<T> { Some(T) }
171 use Option::Some;
172
173 fn main() {
174 So<|>me(12);
175 }
176 ",
177 );
178 let hover = analysis.hover(position).unwrap().unwrap();
179 // not the nicest way to show it currently
180 assert_eq!(hover.info, "Some<i32>(T) -> Option<T>");
181 }
182
183 #[test]
167 fn hover_for_local_variable() { 184 fn hover_for_local_variable() {
168 let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }"); 185 let (analysis, position) = single_file_with_position("fn func(foo: i32) { fo<|>o; }");
169 let hover = analysis.hover(position).unwrap().unwrap(); 186 let hover = analysis.hover(position).unwrap().unwrap();
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs
index 57a490fa7..4b9fc9372 100644
--- a/crates/ra_ide_api/src/lib.rs
+++ b/crates/ra_ide_api/src/lib.rs
@@ -38,7 +38,7 @@ mod marks;
38 38
39use std::sync::Arc; 39use std::sync::Arc;
40 40
41use ra_syntax::{SourceFile, TreeArc, TextRange, TextUnit}; 41use ra_syntax::{SourceFile, TreeArc, TextRange, TextUnit, AstNode};
42use ra_text_edit::TextEdit; 42use ra_text_edit::TextEdit;
43use ra_db::{ 43use ra_db::{
44 SourceDatabase, CheckCanceled, 44 SourceDatabase, CheckCanceled,
@@ -244,8 +244,7 @@ impl Analysis {
244 /// Returns a syntax tree represented as `String`, for debug purposes. 244 /// Returns a syntax tree represented as `String`, for debug purposes.
245 // FIXME: use a better name here. 245 // FIXME: use a better name here.
246 pub fn syntax_tree(&self, file_id: FileId) -> String { 246 pub fn syntax_tree(&self, file_id: FileId) -> String {
247 let file = self.db.parse(file_id); 247 self.db.parse(file_id).syntax().debug_dump()
248 ra_ide_api_light::syntax_tree(&file)
249 } 248 }
250 249
251 /// Returns an edit to remove all newlines in the range, cleaning up minor 250 /// Returns an edit to remove all newlines in the range, cleaning up minor
diff --git a/crates/ra_ide_api_light/src/join_lines.rs b/crates/ra_ide_api_light/src/join_lines.rs
index 970afd327..b5bcd62fb 100644
--- a/crates/ra_ide_api_light/src/join_lines.rs
+++ b/crates/ra_ide_api_light/src/join_lines.rs
@@ -2,8 +2,9 @@ use itertools::Itertools;
2use ra_syntax::{ 2use ra_syntax::{
3 SourceFile, TextRange, TextUnit, AstNode, SyntaxNode, 3 SourceFile, TextRange, TextUnit, AstNode, SyntaxNode,
4 SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK}, 4 SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK},
5 algo::find_covering_node, 5 algo::{find_covering_node, non_trivia_sibling},
6 ast, 6 ast,
7 Direction,
7}; 8};
8use ra_fmt::{ 9use ra_fmt::{
9 compute_ws, extract_trivial_expression 10 compute_ws, extract_trivial_expression
@@ -120,11 +121,30 @@ fn remove_newline(
120 } 121 }
121} 122}
122 123
124fn has_comma_after(node: &SyntaxNode) -> bool {
125 match non_trivia_sibling(node, Direction::Next) {
126 Some(n) => n.kind() == COMMA,
127 _ => false,
128 }
129}
130
123fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { 131fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> {
124 let block = ast::Block::cast(node.parent()?)?; 132 let block = ast::Block::cast(node.parent()?)?;
125 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; 133 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
126 let expr = extract_trivial_expression(block)?; 134 let expr = extract_trivial_expression(block)?;
127 edit.replace(block_expr.syntax().range(), expr.syntax().text().to_string()); 135
136 let block_range = block_expr.syntax().range();
137 let mut buf = expr.syntax().text().to_string();
138
139 // Match block needs to have a comma after the block
140 if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) {
141 if !has_comma_after(match_arm.syntax()) {
142 buf.push(',');
143 }
144 }
145
146 edit.replace(block_range, buf);
147
128 Some(()) 148 Some(())
129} 149}
130 150
@@ -208,6 +228,140 @@ fn foo() {
208 } 228 }
209 229
210 #[test] 230 #[test]
231 fn join_lines_adds_comma_for_block_in_match_arm() {
232 check_join_lines(
233 r"
234fn foo(e: Result<U, V>) {
235 match e {
236 Ok(u) => <|>{
237 u.foo()
238 }
239 Err(v) => v,
240 }
241}",
242 r"
243fn foo(e: Result<U, V>) {
244 match e {
245 Ok(u) => <|>u.foo(),
246 Err(v) => v,
247 }
248}",
249 );
250 }
251
252 #[test]
253 fn join_lines_keeps_comma_for_block_in_match_arm() {
254 // We already have a comma
255 check_join_lines(
256 r"
257fn foo(e: Result<U, V>) {
258 match e {
259 Ok(u) => <|>{
260 u.foo()
261 },
262 Err(v) => v,
263 }
264}",
265 r"
266fn foo(e: Result<U, V>) {
267 match e {
268 Ok(u) => <|>u.foo(),
269 Err(v) => v,
270 }
271}",
272 );
273
274 // comma with whitespace between brace and ,
275 check_join_lines(
276 r"
277fn foo(e: Result<U, V>) {
278 match e {
279 Ok(u) => <|>{
280 u.foo()
281 } ,
282 Err(v) => v,
283 }
284}",
285 r"
286fn foo(e: Result<U, V>) {
287 match e {
288 Ok(u) => <|>u.foo() ,
289 Err(v) => v,
290 }
291}",
292 );
293
294 // comma with newline between brace and ,
295 check_join_lines(
296 r"
297fn foo(e: Result<U, V>) {
298 match e {
299 Ok(u) => <|>{
300 u.foo()
301 }
302 ,
303 Err(v) => v,
304 }
305}",
306 r"
307fn foo(e: Result<U, V>) {
308 match e {
309 Ok(u) => <|>u.foo()
310 ,
311 Err(v) => v,
312 }
313}",
314 );
315 }
316
317 #[test]
318 fn join_lines_keeps_comma_with_single_arg_tuple() {
319 // A single arg tuple
320 check_join_lines(
321 r"
322fn foo() {
323 let x = (<|>{
324 4
325 },);
326}",
327 r"
328fn foo() {
329 let x = (<|>4,);
330}",
331 );
332
333 // single arg tuple with whitespace between brace and comma
334 check_join_lines(
335 r"
336fn foo() {
337 let x = (<|>{
338 4
339 } ,);
340}",
341 r"
342fn foo() {
343 let x = (<|>4 ,);
344}",
345 );
346
347 // single arg tuple with newline between brace and comma
348 check_join_lines(
349 r"
350fn foo() {
351 let x = (<|>{
352 4
353 }
354 ,);
355}",
356 r"
357fn foo() {
358 let x = (<|>4
359 ,);
360}",
361 );
362 }
363
364 #[test]
211 fn test_join_lines_use_items_left() { 365 fn test_join_lines_use_items_left() {
212 // No space after the '{' 366 // No space after the '{'
213 check_join_lines( 367 check_join_lines(
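Reduced to its core decision, the new match-arm handling appends a trailing comma when the collapsed block was an arm body and no comma already follows, skipping trivia. A self-contained sketch over plain data rather than syntax nodes; the names mirror the functions above, but this is not the real implementation:

#[derive(Clone, Copy, PartialEq)]
enum Kind {
    Comma,
    Whitespace,
    Ident,
}

fn is_trivia(kind: Kind) -> bool {
    matches!(kind, Kind::Whitespace)
}

// stand-in for `non_trivia_sibling(match_arm.syntax(), Direction::Next)`
fn has_comma_after(following_siblings: &[Kind]) -> bool {
    following_siblings.iter().copied().find(|&k| !is_trivia(k)) == Some(Kind::Comma)
}

fn join_arm_body(mut body: String, is_match_arm: bool, following: &[Kind]) -> String {
    // a match-arm body that loses its braces needs a trailing comma,
    // unless one already follows (possibly separated by whitespace)
    if is_match_arm && !has_comma_after(following) {
        body.push(',');
    }
    body
}

fn main() {
    // `Ok(u) => { u.foo() }` with no comma after the block: a comma is added
    assert_eq!(join_arm_body("u.foo()".into(), true, &[Kind::Whitespace, Kind::Ident]), "u.foo(),");
    // `Ok(u) => { u.foo() } ,`: the existing comma is kept and none is added
    assert_eq!(join_arm_body("u.foo()".into(), true, &[Kind::Whitespace, Kind::Comma]), "u.foo()");
}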
diff --git a/crates/ra_ide_api_light/src/lib.rs b/crates/ra_ide_api_light/src/lib.rs
index 6d1ce8dbf..43cdd6ea4 100644
--- a/crates/ra_ide_api_light/src/lib.rs
+++ b/crates/ra_ide_api_light/src/lib.rs
@@ -123,10 +123,6 @@ pub fn highlight(root: &SyntaxNode) -> Vec<HighlightedRange> {
123 res 123 res
124} 124}
125 125
126pub fn syntax_tree(file: &SourceFile) -> String {
127 ::ra_syntax::utils::dump_tree(file.syntax())
128}
129
130#[cfg(test)] 126#[cfg(test)]
131mod tests { 127mod tests {
132 use ra_syntax::AstNode; 128 use ra_syntax::AstNode;
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml
index 6342e0628..ef6dea393 100644
--- a/crates/ra_lsp_server/Cargo.toml
+++ b/crates/ra_lsp_server/Cargo.toml
@@ -15,7 +15,7 @@ crossbeam-channel = "0.3.5"
15flexi_logger = "0.10.0" 15flexi_logger = "0.10.0"
16log = "0.4.3" 16log = "0.4.3"
17url_serde = "0.2.0" 17url_serde = "0.2.0"
18lsp-types = "0.55.0" 18lsp-types = "0.56.0"
19rustc-hash = "1.0" 19rustc-hash = "1.0"
20parking_lot = "0.7.0" 20parking_lot = "0.7.0"
21 21
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 9208ee473..9abd4054e 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -419,7 +419,7 @@ pub fn handle_signature_help(
419 Ok(Some(req::SignatureHelp { 419 Ok(Some(req::SignatureHelp {
420 signatures: vec![sig_info], 420 signatures: vec![sig_info],
421 active_signature: Some(0), 421 active_signature: Some(0),
422 active_parameter: call_info.active_parameter.map(|it| it as u64), 422 active_parameter: call_info.active_parameter.map(|it| it as i64),
423 })) 423 }))
424 } else { 424 } else {
425 Ok(None) 425 Ok(None)
diff --git a/crates/ra_parser/Cargo.toml b/crates/ra_parser/Cargo.toml
new file mode 100644
index 000000000..b110e2bc6
--- /dev/null
+++ b/crates/ra_parser/Cargo.toml
@@ -0,0 +1,9 @@
1[package]
2edition = "2018"
3name = "ra_parser"
4version = "0.1.0"
5authors = ["rust-analyzer developers"]
6publish = false
7
8[dependencies]
9drop_bomb = "0.1.4"
diff --git a/crates/ra_parser/src/event.rs b/crates/ra_parser/src/event.rs
new file mode 100644
index 000000000..6361d5d86
--- /dev/null
+++ b/crates/ra_parser/src/event.rs
@@ -0,0 +1,127 @@
1//! This module provides a way to construct a `File`.
2//! It is intended to be completely decoupled from the
3//! parser, so that the tree representation and the parser
4//! algorithm can evolve independently.
5//!
6//! The `TreeSink` trait is the bridge between the parser and the
7//! tree builder: the parser produces a stream of events like
8//! `start node` and `finish node`, and `FileBuilder` converts
9//! this stream to a real tree.
10use std::mem;
11
12use crate::{
13 ParseError, TreeSink,
14 SyntaxKind::{self, *},
15};
16
17/// `Parser` produces a flat list of `Event`s.
18/// They are converted to a tree-structure in
19/// a separate pass, via `TreeBuilder`.
20#[derive(Debug)]
21pub(crate) enum Event {
22 /// This event signifies the start of the node.
23 /// It should be either abandoned (in which case the
24 /// `kind` is `TOMBSTONE`, and the event is ignored),
25 /// or completed via a `Finish` event.
26 ///
27 /// All tokens between a `Start` and a `Finish` would
28 /// become the children of the respective node.
29 ///
30 /// For left-recursive syntactic constructs, the parser produces
31 /// a child node before it sees a parent. `forward_parent`
32 /// saves the position of current event's parent.
33 ///
34 /// Consider this path
35 ///
36 /// foo::bar
37 ///
38 /// The events for it would look like this:
39 ///
40 ///
41 /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH
42 /// | /\
43 /// | |
44 /// +------forward-parent------+
45 ///
46 /// And the tree would look like this
47 ///
48 /// +--PATH---------+
49 /// | | |
50 /// | | |
51 /// | '::' 'bar'
52 /// |
53 /// PATH
54 /// |
55 /// 'foo'
56 ///
57 /// See also `CompletedMarker::precede`.
58 Start {
59 kind: SyntaxKind,
60 forward_parent: Option<u32>,
61 },
62
63 /// Complete the previous `Start` event
64 Finish,
65
66 /// Produce a single leaf-element.
67 /// `n_raw_tokens` is used to glue complex contextual tokens.
68 /// For example, the lexer tokenizes `>>` as `>`, `>`, and
69 /// `n_raw_tokens = 2` is used to produce a single `>>`.
70 Token {
71 kind: SyntaxKind,
72 n_raw_tokens: u8,
73 },
74
75 Error {
76 msg: ParseError,
77 },
78}
79
80impl Event {
81 pub(crate) fn tombstone() -> Self {
82 Event::Start { kind: TOMBSTONE, forward_parent: None }
83 }
84}
85
86/// Generate the syntax tree by feeding the events into the given `TreeSink`.
87pub(super) fn process(sink: &mut dyn TreeSink, mut events: Vec<Event>) {
88 let mut forward_parents = Vec::new();
89
90 for i in 0..events.len() {
91 match mem::replace(&mut events[i], Event::tombstone()) {
92 Event::Start { kind: TOMBSTONE, .. } => (),
93
94 Event::Start { kind, forward_parent } => {
95 // For events [A, B, C], B is A's forward_parent and C is B's forward_parent:
96 // the events arrive in the order `A -> B -> C`, but the parent-child
97 // relation is the reverse, `C <- B <- A` (C is the outermost node).
98
99 // append `A` into parents.
100 forward_parents.push(kind);
101 let mut idx = i;
102 let mut fp = forward_parent;
103 while let Some(fwd) = fp {
104 idx += fwd as usize;
105 // append `A`'s forward_parent `B`
106 fp = match mem::replace(&mut events[idx], Event::tombstone()) {
107 Event::Start { kind, forward_parent } => {
108 forward_parents.push(kind);
109 forward_parent
110 }
111 _ => unreachable!(),
112 };
113 // append `B`'s forward_parent `C` in the next stage.
114 }
115
116 for kind in forward_parents.drain(..).rev() {
117 sink.start_branch(kind);
118 }
119 }
120 Event::Finish => sink.finish_branch(),
121 Event::Token { kind, n_raw_tokens } => {
122 sink.leaf(kind, n_raw_tokens);
123 }
124 Event::Error { msg } => sink.error(msg),
125 }
126 }
127}
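To see concretely how `forward_parent` reorders `Start` events, here is a trimmed-down re-implementation of the loop above over a toy event type. It only demonstrates the reordering for the `foo::bar` example from the doc comment (a bool vector stands in for the real code's TOMBSTONE trick); it is not the real `TreeSink` plumbing:

#[derive(Debug)]
enum Ev {
    Start { kind: &'static str, forward_parent: Option<u32> },
    Token(&'static str),
    Finish,
}

fn process(events: &[Ev]) -> Vec<String> {
    let mut out = Vec::new();
    let mut forward_parents = Vec::new();
    // marks events already handled via a forward_parent chain
    let mut consumed = vec![false; events.len()];
    for i in 0..events.len() {
        if consumed[i] {
            continue;
        }
        match &events[i] {
            Ev::Start { kind, forward_parent } => {
                // walk the forward_parent chain child-first...
                forward_parents.push(*kind);
                let (mut idx, mut fp) = (i, *forward_parent);
                while let Some(fwd) = fp {
                    idx += fwd as usize;
                    consumed[idx] = true;
                    fp = match &events[idx] {
                        Ev::Start { kind, forward_parent } => {
                            forward_parents.push(*kind);
                            *forward_parent
                        }
                        _ => unreachable!(),
                    };
                }
                // ...then start the branches parent-first, so the outer node is
                // opened before the node whose event was actually emitted first
                for kind in forward_parents.drain(..).rev() {
                    out.push(format!("start {}", kind));
                }
            }
            Ev::Token(text) => out.push(format!("token {}", text)),
            Ev::Finish => out.push("finish".to_string()),
        }
    }
    out
}

fn main() {
    // events for `foo::bar`, as in the doc comment above: the first START(PATH)
    // records that its parent starts 3 events further on
    let events = [
        Ev::Start { kind: "PATH", forward_parent: Some(3) },
        Ev::Token("foo"),
        Ev::Finish,
        Ev::Start { kind: "PATH", forward_parent: None },
        Ev::Token("::"),
        Ev::Token("bar"),
        Ev::Finish,
    ];
    // prints: start PATH / start PATH / token foo / finish / token :: / token bar / finish
    for line in process(&events) {
        println!("{}", line);
    }
}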
diff --git a/crates/ra_syntax/src/parsing/grammar.rs b/crates/ra_parser/src/grammar.rs
index 7ca9c223c..15aab6c6f 100644
--- a/crates/ra_syntax/src/parsing/grammar.rs
+++ b/crates/ra_parser/src/grammar.rs
@@ -37,23 +37,24 @@ mod type_params;
37mod types; 37mod types;
38 38
39use crate::{ 39use crate::{
40 SyntaxNode,
41 SyntaxKind::{self, *}, 40 SyntaxKind::{self, *},
42 parsing::{ 41 TokenSet,
43 token_set::TokenSet, 42 parser::{CompletedMarker, Marker, Parser},
44 parser::{CompletedMarker, Marker, Parser}
45 },
46}; 43};
47 44
48pub(super) fn root(p: &mut Parser) { 45pub(crate) fn root(p: &mut Parser) {
49 let m = p.start(); 46 let m = p.start();
50 p.eat(SHEBANG); 47 p.eat(SHEBANG);
51 items::mod_contents(p, false); 48 items::mod_contents(p, false);
52 m.complete(p, SOURCE_FILE); 49 m.complete(p, SOURCE_FILE);
53} 50}
54 51
55pub(super) fn reparser(node: &SyntaxNode) -> Option<fn(&mut Parser)> { 52pub(crate) fn reparser(
56 let res = match node.kind() { 53 node: SyntaxKind,
54 first_child: Option<SyntaxKind>,
55 parent: Option<SyntaxKind>,
56) -> Option<fn(&mut Parser)> {
57 let res = match node {
57 BLOCK => expressions::block, 58 BLOCK => expressions::block,
58 NAMED_FIELD_DEF_LIST => items::named_field_def_list, 59 NAMED_FIELD_DEF_LIST => items::named_field_def_list,
59 NAMED_FIELD_LIST => items::named_field_list, 60 NAMED_FIELD_LIST => items::named_field_list,
@@ -61,16 +62,13 @@ pub(super) fn reparser(node: &SyntaxNode) -> Option<fn(&mut Parser)> {
61 MATCH_ARM_LIST => items::match_arm_list, 62 MATCH_ARM_LIST => items::match_arm_list,
62 USE_TREE_LIST => items::use_tree_list, 63 USE_TREE_LIST => items::use_tree_list,
63 EXTERN_ITEM_LIST => items::extern_item_list, 64 EXTERN_ITEM_LIST => items::extern_item_list,
64 TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => items::token_tree, 65 TOKEN_TREE if first_child? == L_CURLY => items::token_tree,
65 ITEM_LIST => { 66 ITEM_LIST => match parent? {
66 let parent = node.parent().unwrap(); 67 IMPL_BLOCK => items::impl_item_list,
67 match parent.kind() { 68 TRAIT_DEF => items::trait_item_list,
68 IMPL_BLOCK => items::impl_item_list, 69 MODULE => items::mod_item_list,
69 TRAIT_DEF => items::trait_item_list, 70 _ => return None,
70 MODULE => items::mod_item_list, 71 },
71 _ => return None,
72 }
73 }
74 _ => return None, 72 _ => return None,
75 }; 73 };
76 Some(res) 74 Some(res)
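Since `reparser` now takes plain `SyntaxKind`s instead of a `SyntaxNode`, the caller on the `ra_syntax` side has to extract the kinds itself before crossing the crate boundary. A hedged sketch of such a bridge; `reparser_for` is a made-up name, and the real adaptation presumably lives in `ra_syntax`'s `parsing/reparsing.rs`:

// inside ra_syntax, where `SyntaxNode` is the rowan-backed node type
fn reparser_for(node: &SyntaxNode) -> Option<ra_parser::Reparser> {
    ra_parser::Reparser::for_node(
        node.kind(),
        node.first_child().map(|child| child.kind()),
        node.parent().map(|parent| parent.kind()),
    )
}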
diff --git a/crates/ra_syntax/src/parsing/grammar/attributes.rs b/crates/ra_parser/src/grammar/attributes.rs
index cd30e8a45..cd30e8a45 100644
--- a/crates/ra_syntax/src/parsing/grammar/attributes.rs
+++ b/crates/ra_parser/src/grammar/attributes.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs
index d5a4f4d7b..d5a4f4d7b 100644
--- a/crates/ra_syntax/src/parsing/grammar/expressions.rs
+++ b/crates/ra_parser/src/grammar/expressions.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs
index e74305b6a..e74305b6a 100644
--- a/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs
+++ b/crates/ra_parser/src/grammar/expressions/atom.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs
index 4b962c1f3..4b962c1f3 100644
--- a/crates/ra_syntax/src/parsing/grammar/items.rs
+++ b/crates/ra_parser/src/grammar/items.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/items/consts.rs b/crates/ra_parser/src/grammar/items/consts.rs
index 5a5852f83..5a5852f83 100644
--- a/crates/ra_syntax/src/parsing/grammar/items/consts.rs
+++ b/crates/ra_parser/src/grammar/items/consts.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/items/nominal.rs b/crates/ra_parser/src/grammar/items/nominal.rs
index ff9b38f9c..ff9b38f9c 100644
--- a/crates/ra_syntax/src/parsing/grammar/items/nominal.rs
+++ b/crates/ra_parser/src/grammar/items/nominal.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/items/traits.rs b/crates/ra_parser/src/grammar/items/traits.rs
index d5a8ccd98..d5a8ccd98 100644
--- a/crates/ra_syntax/src/parsing/grammar/items/traits.rs
+++ b/crates/ra_parser/src/grammar/items/traits.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs
index 5111d37eb..5111d37eb 100644
--- a/crates/ra_syntax/src/parsing/grammar/items/use_item.rs
+++ b/crates/ra_parser/src/grammar/items/use_item.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs
index 185386569..185386569 100644
--- a/crates/ra_syntax/src/parsing/grammar/params.rs
+++ b/crates/ra_parser/src/grammar/params.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs
index 33a11886c..33a11886c 100644
--- a/crates/ra_syntax/src/parsing/grammar/paths.rs
+++ b/crates/ra_parser/src/grammar/paths.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs
index 9d7da639d..9d7da639d 100644
--- a/crates/ra_syntax/src/parsing/grammar/patterns.rs
+++ b/crates/ra_parser/src/grammar/patterns.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/type_args.rs b/crates/ra_parser/src/grammar/type_args.rs
index f889419c5..f889419c5 100644
--- a/crates/ra_syntax/src/parsing/grammar/type_args.rs
+++ b/crates/ra_parser/src/grammar/type_args.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/type_params.rs b/crates/ra_parser/src/grammar/type_params.rs
index 40f998682..40f998682 100644
--- a/crates/ra_syntax/src/parsing/grammar/type_params.rs
+++ b/crates/ra_parser/src/grammar/type_params.rs
diff --git a/crates/ra_syntax/src/parsing/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs
index adc189a29..adc189a29 100644
--- a/crates/ra_syntax/src/parsing/grammar/types.rs
+++ b/crates/ra_parser/src/grammar/types.rs
diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs
new file mode 100644
index 000000000..ddc08e462
--- /dev/null
+++ b/crates/ra_parser/src/lib.rs
@@ -0,0 +1,88 @@
1//! The Rust parser.
2//!
3//! The parser doesn't know about concrete representation of tokens and syntax
4//! trees. Abstract `TokenSource` and `TreeSink` traits are used instead. As a
5//! consequence, this crate does not contain a lexer.
6//!
7//! The `Parser` struct from the `parser` module is a cursor into the sequence
8//! of tokens. Parsing routines use `Parser` to inspect current state and
9//! advance the parsing.
10//!
11//! The actual parsing happens in the `grammar` module.
12//!
13//! Tests for this crate live in `ra_syntax` crate.
14
15#[macro_use]
16mod token_set;
17mod syntax_kind;
18mod event;
19mod parser;
20mod grammar;
21
22pub(crate) use token_set::TokenSet;
23
24pub use syntax_kind::SyntaxKind;
25
26#[derive(Debug, Clone, PartialEq, Eq, Hash)]
27pub struct ParseError(pub String);
28
29/// `TokenSource` abstracts the source of tokens the parser operates on.
30///
31/// Hopefully this will allow us to treat text and token trees in the same way!
32pub trait TokenSource {
33 /// What is the current token?
34 fn token_kind(&self, pos: usize) -> SyntaxKind;
35 /// Is the current token joined to the next one (`> >` vs `>>`).
36 fn is_token_joint_to_next(&self, pos: usize) -> bool;
37 /// Is the current token a specified keyword?
38 fn is_keyword(&self, pos: usize, kw: &str) -> bool;
39}
40
41/// `TreeSink` abstracts details of a particular syntax tree implementation.
42pub trait TreeSink {
43 /// Adds new leaf to the current branch.
44 fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8);
45
46 /// Start new branch and make it current.
47 fn start_branch(&mut self, kind: SyntaxKind);
48
49 /// Finish current branch and restore previous
50 /// branch as current.
51 fn finish_branch(&mut self);
52
53 fn error(&mut self, error: ParseError);
54}
55
56/// Parse the given tokens into the given sink as a Rust file.
57pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
58 let mut p = parser::Parser::new(token_source);
59 grammar::root(&mut p);
60 let events = p.finish();
61 event::process(tree_sink, events);
62}
63
64/// A parsing function for a specific braced-block.
65pub struct Reparser(fn(&mut parser::Parser));
66
67impl Reparser {
68 /// If the node is a braced block, return the corresponding `Reparser`.
69 pub fn for_node(
70 node: SyntaxKind,
71 first_child: Option<SyntaxKind>,
72 parent: Option<SyntaxKind>,
73 ) -> Option<Reparser> {
74 grammar::reparser(node, first_child, parent).map(Reparser)
75 }
76
77 /// Re-parse given tokens using this `Reparser`.
78 ///
79 /// Tokens must start with `{`, end with `}` and form a valid brace
80 /// sequence.
81 pub fn parse(self, token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
82 let Reparser(r) = self;
83 let mut p = parser::Parser::new(token_source);
84 r(&mut p);
85 let events = p.finish();
86 event::process(tree_sink, events);
87 }
88}
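A minimal sketch of what implementors of the two traits can look like: a `TokenSource` over a pre-lexed buffer and a `TreeSink` that merely records calls. This is illustration only (the real implementations are `ra_syntax`'s parsing input and tree builder), and it assumes `SyntaxKind` is the plain `Copy` enum from `syntax_kind/generated.rs`:

use ra_parser::{ParseError, SyntaxKind, TokenSource, TreeSink};

struct BufferedTokens {
    kinds: Vec<SyntaxKind>, // non-trivia tokens, in order
    joint: Vec<bool>,       // is token `pos` glued to token `pos + 1`?
    texts: Vec<String>,     // token text, needed for contextual keywords
}

impl TokenSource for BufferedTokens {
    fn token_kind(&self, pos: usize) -> SyntaxKind {
        self.kinds.get(pos).copied().unwrap_or(SyntaxKind::EOF)
    }
    fn is_token_joint_to_next(&self, pos: usize) -> bool {
        self.joint.get(pos).copied().unwrap_or(false)
    }
    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
        self.texts.get(pos).map_or(false, |text| text == kw)
    }
}

struct RecordingSink {
    calls: Vec<String>,
}

impl TreeSink for RecordingSink {
    fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
        self.calls.push(format!("leaf {:?} ({} tokens)", kind, n_tokens));
    }
    fn start_branch(&mut self, kind: SyntaxKind) {
        self.calls.push(format!("start {:?}", kind));
    }
    fn finish_branch(&mut self) {
        self.calls.push("finish".to_string());
    }
    fn error(&mut self, error: ParseError) {
        self.calls.push(format!("error: {}", error.0));
    }
}

With these two in hand, `ra_parser::parse(&tokens, &mut sink)` drives the grammar without the parser crate ever seeing the concrete tree.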
diff --git a/crates/ra_syntax/src/parsing/parser.rs b/crates/ra_parser/src/parser.rs
index 923b0f2b2..a18458148 100644
--- a/crates/ra_syntax/src/parsing/parser.rs
+++ b/crates/ra_parser/src/parser.rs
@@ -4,11 +4,8 @@ use drop_bomb::DropBomb;
4 4
5use crate::{ 5use crate::{
6 SyntaxKind::{self, ERROR, EOF, TOMBSTONE}, 6 SyntaxKind::{self, ERROR, EOF, TOMBSTONE},
7 parsing::{ 7 TokenSource, ParseError, TokenSet,
8 TokenSource, ParseError, 8 event::Event,
9 token_set::TokenSet,
10 event::Event,
11 },
12}; 9};
13 10
14/// `Parser` struct provides the low-level API for 11/// `Parser` struct provides the low-level API for
diff --git a/crates/ra_syntax/src/syntax_kinds.rs b/crates/ra_parser/src/syntax_kind.rs
index c1118c5ab..a2353317f 100644
--- a/crates/ra_syntax/src/syntax_kinds.rs
+++ b/crates/ra_parser/src/syntax_kind.rs
@@ -2,8 +2,6 @@ mod generated;
2 2
3use std::fmt; 3use std::fmt;
4 4
5use crate::SyntaxKind::*;
6
7pub use self::generated::SyntaxKind; 5pub use self::generated::SyntaxKind;
8 6
9impl fmt::Debug for SyntaxKind { 7impl fmt::Debug for SyntaxKind {
@@ -20,7 +18,7 @@ pub(crate) struct SyntaxInfo {
20impl SyntaxKind { 18impl SyntaxKind {
21 pub fn is_trivia(self) -> bool { 19 pub fn is_trivia(self) -> bool {
22 match self { 20 match self {
23 WHITESPACE | COMMENT => true, 21 SyntaxKind::WHITESPACE | SyntaxKind::COMMENT => true,
24 _ => false, 22 _ => false,
25 } 23 }
26 } 24 }
diff --git a/crates/ra_syntax/src/syntax_kinds/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs
index 266b95bbb..1d8f988ae 100644
--- a/crates/ra_syntax/src/syntax_kinds/generated.rs
+++ b/crates/ra_parser/src/syntax_kind/generated.rs
@@ -568,7 +568,7 @@ impl SyntaxKind {
568 EOF => &SyntaxInfo { name: "EOF" }, 568 EOF => &SyntaxInfo { name: "EOF" },
569 } 569 }
570 } 570 }
571 pub(crate) fn from_keyword(ident: &str) -> Option<SyntaxKind> { 571 pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
572 let kw = match ident { 572 let kw = match ident {
573 "use" => USE_KW, 573 "use" => USE_KW,
574 "fn" => FN_KW, 574 "fn" => FN_KW,
@@ -610,7 +610,7 @@ impl SyntaxKind {
610 Some(kw) 610 Some(kw)
611 } 611 }
612 612
613 pub(crate) fn from_char(c: char) -> Option<SyntaxKind> { 613 pub fn from_char(c: char) -> Option<SyntaxKind> {
614 let tok = match c { 614 let tok = match c {
615 ';' => SEMI, 615 ';' => SEMI,
616 ',' => COMMA, 616 ',' => COMMA,
diff --git a/crates/ra_syntax/src/syntax_kinds/generated.rs.tera b/crates/ra_parser/src/syntax_kind/generated.rs.tera
index 837437136..f241a21a0 100644
--- a/crates/ra_syntax/src/syntax_kinds/generated.rs.tera
+++ b/crates/ra_parser/src/syntax_kind/generated.rs.tera
@@ -74,7 +74,7 @@ impl SyntaxKind {
74 EOF => &SyntaxInfo { name: "EOF" }, 74 EOF => &SyntaxInfo { name: "EOF" },
75 } 75 }
76 } 76 }
77 pub(crate) fn from_keyword(ident: &str) -> Option<SyntaxKind> { 77 pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
78 let kw = match ident { 78 let kw = match ident {
79{%- for kw in keywords %} 79{%- for kw in keywords %}
80 "{{kw}}" => {{kw | upper}}_KW, 80 "{{kw}}" => {{kw | upper}}_KW,
@@ -84,7 +84,7 @@ impl SyntaxKind {
84 Some(kw) 84 Some(kw)
85 } 85 }
86 86
87 pub(crate) fn from_char(c: char) -> Option<SyntaxKind> { 87 pub fn from_char(c: char) -> Option<SyntaxKind> {
88 let tok = match c { 88 let tok = match c {
89{%- for t in single_byte_tokens %} 89{%- for t in single_byte_tokens %}
90 '{{t.0}}' => {{t.1}}, 90 '{{t.0}}' => {{t.1}},
diff --git a/crates/ra_syntax/src/parsing/token_set.rs b/crates/ra_parser/src/token_set.rs
index 5719fe5a2..79121b35f 100644
--- a/crates/ra_syntax/src/parsing/token_set.rs
+++ b/crates/ra_parser/src/token_set.rs
@@ -1,5 +1,6 @@
1use crate::SyntaxKind; 1use crate::SyntaxKind;
2 2
3/// A bit-set of `SyntaxKind`s
3#[derive(Clone, Copy)] 4#[derive(Clone, Copy)]
4pub(crate) struct TokenSet(u128); 5pub(crate) struct TokenSet(u128);
5 6
@@ -34,7 +35,7 @@ macro_rules! token_set {
34#[test] 35#[test]
35fn token_set_works_for_tokens() { 36fn token_set_works_for_tokens() {
36 use crate::SyntaxKind::*; 37 use crate::SyntaxKind::*;
37 let ts = token_set! { EOF, SHEBANG }; 38 let ts = token_set![EOF, SHEBANG];
38 assert!(ts.contains(EOF)); 39 assert!(ts.contains(EOF));
39 assert!(ts.contains(SHEBANG)); 40 assert!(ts.contains(SHEBANG));
40 assert!(!ts.contains(PLUS)); 41 assert!(!ts.contains(PLUS));
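The `TokenSet` documented here is a 128-bit bitset keyed by a token's `SyntaxKind` discriminant. A self-contained sketch of the idea with a plain `u16` standing in for `SyntaxKind` (the real crate builds these via the `token_set!` macro and const fns, which are not reproduced here):

#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    const fn new(kinds: &[u16]) -> TokenSet {
        let mut bits = 0u128;
        let mut i = 0;
        while i < kinds.len() {
            bits |= 1u128 << kinds[i] as usize; // one bit per token kind
            i += 1;
        }
        TokenSet(bits)
    }
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
    const fn contains(self, kind: u16) -> bool {
        self.0 & (1u128 << kind as usize) != 0
    }
}

fn main() {
    // discriminants are made up for the example
    const EOF: u16 = 0;
    const SHEBANG: u16 = 1;
    const PLUS: u16 = 2;
    let ts = TokenSet::new(&[EOF, SHEBANG]);
    assert!(ts.contains(EOF) && ts.contains(SHEBANG) && !ts.contains(PLUS));
    let _both = ts.union(TokenSet::new(&[PLUS]));
}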
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index 7ce26b7c4..7e70dad3f 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -21,6 +21,7 @@ text_unit = { version = "0.1.6", features = ["serde"] }
21smol_str = { version = "0.1.9", features = ["serde"] } 21smol_str = { version = "0.1.9", features = ["serde"] }
22 22
23ra_text_edit = { path = "../ra_text_edit" } 23ra_text_edit = { path = "../ra_text_edit" }
24ra_parser = { path = "../ra_parser" }
24 25
25[dev-dependencies] 26[dev-dependencies]
26test_utils = { path = "../test_utils" } 27test_utils = { path = "../test_utils" }
diff --git a/crates/ra_syntax/fuzz/.gitignore b/crates/ra_syntax/fuzz/.gitignore
index 572e03bdf..f734abd49 100644
--- a/crates/ra_syntax/fuzz/.gitignore
+++ b/crates/ra_syntax/fuzz/.gitignore
@@ -1,4 +1,4 @@
1 1Cargo.lock
2target 2target
3corpus 3corpus
4artifacts 4artifacts
diff --git a/crates/ra_syntax/fuzz/Cargo.lock b/crates/ra_syntax/fuzz/Cargo.lock
deleted file mode 100644
index 4be6be44f..000000000
--- a/crates/ra_syntax/fuzz/Cargo.lock
+++ /dev/null
@@ -1,520 +0,0 @@
1[[package]]
2name = "arbitrary"
3version = "0.1.1"
4source = "registry+https://github.com/rust-lang/crates.io-index"
5
6[[package]]
7name = "arrayvec"
8version = "0.4.10"
9source = "registry+https://github.com/rust-lang/crates.io-index"
10dependencies = [
11 "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
12]
13
14[[package]]
15name = "bit-set"
16version = "0.5.0"
17source = "registry+https://github.com/rust-lang/crates.io-index"
18dependencies = [
19 "bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
20]
21
22[[package]]
23name = "bit-vec"
24version = "0.5.0"
25source = "registry+https://github.com/rust-lang/crates.io-index"
26
27[[package]]
28name = "bitflags"
29version = "1.0.4"
30source = "registry+https://github.com/rust-lang/crates.io-index"
31
32[[package]]
33name = "byteorder"
34version = "1.2.7"
35source = "registry+https://github.com/rust-lang/crates.io-index"
36
37[[package]]
38name = "cc"
39version = "1.0.28"
40source = "registry+https://github.com/rust-lang/crates.io-index"
41
42[[package]]
43name = "cfg-if"
44version = "0.1.6"
45source = "registry+https://github.com/rust-lang/crates.io-index"
46
47[[package]]
48name = "cloudabi"
49version = "0.0.3"
50source = "registry+https://github.com/rust-lang/crates.io-index"
51dependencies = [
52 "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
53]
54
55[[package]]
56name = "drop_bomb"
57version = "0.1.4"
58source = "registry+https://github.com/rust-lang/crates.io-index"
59
60[[package]]
61name = "either"
62version = "1.5.0"
63source = "registry+https://github.com/rust-lang/crates.io-index"
64
65[[package]]
66name = "fnv"
67version = "1.0.6"
68source = "registry+https://github.com/rust-lang/crates.io-index"
69
70[[package]]
71name = "fuchsia-zircon"
72version = "0.3.3"
73source = "registry+https://github.com/rust-lang/crates.io-index"
74dependencies = [
75 "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
76 "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
77]
78
79[[package]]
80name = "fuchsia-zircon-sys"
81version = "0.3.3"
82source = "registry+https://github.com/rust-lang/crates.io-index"
83
84[[package]]
85name = "itertools"
86version = "0.8.0"
87source = "registry+https://github.com/rust-lang/crates.io-index"
88dependencies = [
89 "either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
90]
91
92[[package]]
93name = "lazy_static"
94version = "1.2.0"
95source = "registry+https://github.com/rust-lang/crates.io-index"
96
97[[package]]
98name = "libc"
99version = "0.2.45"
100source = "registry+https://github.com/rust-lang/crates.io-index"
101
102[[package]]
103name = "libfuzzer-sys"
104version = "0.1.0"
105source = "git+https://github.com/rust-fuzz/libfuzzer-sys.git#4a413199b5cb1bbed6a1d157b2342b925f8464ac"
106dependencies = [
107 "arbitrary 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
108 "cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)",
109]
110
111[[package]]
112name = "lock_api"
113version = "0.1.5"
114source = "registry+https://github.com/rust-lang/crates.io-index"
115dependencies = [
116 "owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
117 "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
118]
119
120[[package]]
121name = "nodrop"
122version = "0.1.13"
123source = "registry+https://github.com/rust-lang/crates.io-index"
124
125[[package]]
126name = "num-traits"
127version = "0.2.6"
128source = "registry+https://github.com/rust-lang/crates.io-index"
129
130[[package]]
131name = "owning_ref"
132version = "0.4.0"
133source = "registry+https://github.com/rust-lang/crates.io-index"
134dependencies = [
135 "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
136]
137
138[[package]]
139name = "parking_lot"
140version = "0.7.0"
141source = "registry+https://github.com/rust-lang/crates.io-index"
142dependencies = [
143 "lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
144 "parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
145]
146
147[[package]]
148name = "parking_lot_core"
149version = "0.4.0"
150source = "registry+https://github.com/rust-lang/crates.io-index"
151dependencies = [
152 "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
153 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
154 "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
155 "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
156 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
157]
158
159[[package]]
160name = "proptest"
161version = "0.8.7"
162source = "registry+https://github.com/rust-lang/crates.io-index"
163dependencies = [
164 "bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
165 "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
166 "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
167 "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
168 "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
169 "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
170 "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
171 "regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
172 "rusty-fork 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
173 "tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
174]
175
176[[package]]
177name = "quick-error"
178version = "1.2.2"
179source = "registry+https://github.com/rust-lang/crates.io-index"
180
181[[package]]
182name = "ra_syntax"
183version = "0.1.0"
184dependencies = [
185 "arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
186 "drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
187 "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
188 "parking_lot 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
189 "ra_text_edit 0.1.0",
190 "rowan 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
191 "smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
192 "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
193 "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
194]
195
196[[package]]
197name = "ra_syntax-fuzz"
198version = "0.0.1"
199dependencies = [
200 "libfuzzer-sys 0.1.0 (git+https://github.com/rust-fuzz/libfuzzer-sys.git)",
201 "ra_syntax 0.1.0",
202]
203
204[[package]]
205name = "ra_text_edit"
206version = "0.1.0"
207dependencies = [
208 "proptest 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)",
209 "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
210]
211
212[[package]]
213name = "rand"
214version = "0.5.5"
215source = "registry+https://github.com/rust-lang/crates.io-index"
216dependencies = [
217 "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
218 "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
219 "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
220 "rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
221 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
222]
223
224[[package]]
225name = "rand"
226version = "0.6.1"
227source = "registry+https://github.com/rust-lang/crates.io-index"
228dependencies = [
229 "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
230 "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
231 "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
232 "rand_chacha 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
233 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
234 "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
235 "rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
236 "rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
237 "rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
238 "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
239 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
240]
241
242[[package]]
243name = "rand_chacha"
244version = "0.1.0"
245source = "registry+https://github.com/rust-lang/crates.io-index"
246dependencies = [
247 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
248 "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
249]
250
251[[package]]
252name = "rand_core"
253version = "0.2.2"
254source = "registry+https://github.com/rust-lang/crates.io-index"
255dependencies = [
256 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
257]
258
259[[package]]
260name = "rand_core"
261version = "0.3.0"
262source = "registry+https://github.com/rust-lang/crates.io-index"
263
264[[package]]
265name = "rand_hc"
266version = "0.1.0"
267source = "registry+https://github.com/rust-lang/crates.io-index"
268dependencies = [
269 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
270]
271
272[[package]]
273name = "rand_isaac"
274version = "0.1.1"
275source = "registry+https://github.com/rust-lang/crates.io-index"
276dependencies = [
277 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
278]
279
280[[package]]
281name = "rand_pcg"
282version = "0.1.1"
283source = "registry+https://github.com/rust-lang/crates.io-index"
284dependencies = [
285 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
286 "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
287]
288
289[[package]]
290name = "rand_xorshift"
291version = "0.1.0"
292source = "registry+https://github.com/rust-lang/crates.io-index"
293dependencies = [
294 "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
295]
296
297[[package]]
298name = "redox_syscall"
299version = "0.1.50"
300source = "registry+https://github.com/rust-lang/crates.io-index"
301
302[[package]]
303name = "regex-syntax"
304version = "0.6.4"
305source = "registry+https://github.com/rust-lang/crates.io-index"
306dependencies = [
307 "ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
308]
309
310[[package]]
311name = "remove_dir_all"
312version = "0.5.1"
313source = "registry+https://github.com/rust-lang/crates.io-index"
314dependencies = [
315 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
316]
317
318[[package]]
319name = "rowan"
320version = "0.2.0"
321source = "registry+https://github.com/rust-lang/crates.io-index"
322dependencies = [
323 "parking_lot 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
324 "smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
325 "text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
326]
327
328[[package]]
329name = "rustc_version"
330version = "0.2.3"
331source = "registry+https://github.com/rust-lang/crates.io-index"
332dependencies = [
333 "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
334]
335
336[[package]]
337name = "rusty-fork"
338version = "0.2.1"
339source = "registry+https://github.com/rust-lang/crates.io-index"
340dependencies = [
341 "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
342 "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
343 "tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
344 "wait-timeout 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
345]
346
347[[package]]
348name = "scopeguard"
349version = "0.3.3"
350source = "registry+https://github.com/rust-lang/crates.io-index"
351
352[[package]]
353name = "semver"
354version = "0.9.0"
355source = "registry+https://github.com/rust-lang/crates.io-index"
356dependencies = [
357 "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
358]
359
360[[package]]
361name = "semver-parser"
362version = "0.7.0"
363source = "registry+https://github.com/rust-lang/crates.io-index"
364
365[[package]]
366name = "serde"
367version = "1.0.84"
368source = "registry+https://github.com/rust-lang/crates.io-index"
369
370[[package]]
371name = "smallvec"
372version = "0.6.7"
373source = "registry+https://github.com/rust-lang/crates.io-index"
374dependencies = [
375 "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
376]
377
378[[package]]
379name = "smol_str"
380version = "0.1.9"
381source = "registry+https://github.com/rust-lang/crates.io-index"
382dependencies = [
383 "serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)",
384]
385
386[[package]]
387name = "stable_deref_trait"
388version = "1.1.1"
389source = "registry+https://github.com/rust-lang/crates.io-index"
390
391[[package]]
392name = "tempfile"
393version = "3.0.5"
394source = "registry+https://github.com/rust-lang/crates.io-index"
395dependencies = [
396 "cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
397 "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
398 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
399 "redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)",
400 "remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
401 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
402]
403
404[[package]]
405name = "text_unit"
406version = "0.1.6"
407source = "registry+https://github.com/rust-lang/crates.io-index"
408dependencies = [
409 "serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)",
410]
411
412[[package]]
413name = "ucd-util"
414version = "0.1.3"
415source = "registry+https://github.com/rust-lang/crates.io-index"
416
417[[package]]
418name = "unicode-xid"
419version = "0.1.0"
420source = "registry+https://github.com/rust-lang/crates.io-index"
421
422[[package]]
423name = "unreachable"
424version = "1.0.0"
425source = "registry+https://github.com/rust-lang/crates.io-index"
426dependencies = [
427 "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
428]
429
430[[package]]
431name = "void"
432version = "1.0.2"
433source = "registry+https://github.com/rust-lang/crates.io-index"
434
435[[package]]
436name = "wait-timeout"
437version = "0.1.5"
438source = "registry+https://github.com/rust-lang/crates.io-index"
439dependencies = [
440 "libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)",
441]
442
443[[package]]
444name = "winapi"
445version = "0.3.6"
446source = "registry+https://github.com/rust-lang/crates.io-index"
447dependencies = [
448 "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
449 "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
450]
451
452[[package]]
453name = "winapi-i686-pc-windows-gnu"
454version = "0.4.0"
455source = "registry+https://github.com/rust-lang/crates.io-index"
456
457[[package]]
458name = "winapi-x86_64-pc-windows-gnu"
459version = "0.4.0"
460source = "registry+https://github.com/rust-lang/crates.io-index"
461
462[metadata]
463"checksum arbitrary 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c7d1523aa3a127adf8b27af2404c03c12825b4c4d0698f01648d63fa9df62ee"
464"checksum arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "92c7fb76bc8826a8b33b4ee5bb07a247a81e76764ab4d55e8f73e3a4d8808c71"
465"checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
466"checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"
467"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
468"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d"
469"checksum cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4a8b715cb4597106ea87c7c84b2f1d452c7492033765df7f32651e66fcf749"
470"checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4"
471"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
472"checksum drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "69b26e475fd29098530e709294e94e661974c851aed42512793f120fed4e199f"
473"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
474"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
475"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
476"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
477"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358"
478"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
479"checksum libc 0.2.45 (registry+https://github.com/rust-lang/crates.io-index)" = "2d2857ec59fadc0773853c664d2d18e7198e83883e7060b63c924cb077bd5c74"
480"checksum libfuzzer-sys 0.1.0 (git+https://github.com/rust-fuzz/libfuzzer-sys.git)" = "<none>"
481"checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c"
482"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945"
483"checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1"
484"checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13"
485"checksum parking_lot 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9723236a9525c757d9725b993511e3fc941e33f27751942232f0058298297edf"
486"checksum parking_lot_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94c8c7923936b28d546dfd14d4472eaf34c99b14e1c973a32b3e6d4eb04298c9"
487"checksum proptest 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "926d0604475349f463fe44130aae73f2294b5309ab2ca0310b998bd334ef191f"
488"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
489"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c"
490"checksum rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9d223d52ae411a33cf7e54ec6034ec165df296ccd23533d671a28252b6f66a"
491"checksum rand_chacha 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "771b009e3a508cb67e8823dda454aaa5368c7bc1c16829fb77d3e980440dd34a"
492"checksum rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1961a422c4d189dfb50ffa9320bf1f2a9bd54ecb92792fb9477f99a1045f3372"
493"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db"
494"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
495"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
496"checksum rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "086bd09a33c7044e56bb44d5bdde5a60e7f119a9e95b0775f545de759a32fe05"
497"checksum rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effa3fcaa47e18db002bdde6060944b6d2f9cfd8db471c30e873448ad9187be3"
498"checksum redox_syscall 0.1.50 (registry+https://github.com/rust-lang/crates.io-index)" = "52ee9a534dc1301776eff45b4fa92d2c39b1d8c3d3357e6eb593e0d795506fc2"
499"checksum regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4e47a2ed29da7a9e1960e1639e7a982e6edc6d49be308a3b02daf511504a16d1"
500"checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5"
501"checksum rowan 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9ae7dba5e703f423ceb8646d636c73e6d858a2f8c834808b4565e42ccda9e2"
502"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
503"checksum rusty-fork 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9591f190d2852720b679c21f66ad929f9f1d7bb09d1193c26167586029d8489c"
504"checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27"
505"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
506"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
507"checksum serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)" = "0e732ed5a5592c17d961555e3b552985baf98d50ce418b7b655f31f6ba7eb1b7"
508"checksum smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b73ea3738b47563803ef814925e69be00799a8c07420be8b996f8e98fb2336db"
509"checksum smol_str 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9af1035bc5d742ab6b7ab16713e41cc2ffe78cb474f6f43cd696b2d16052007e"
510"checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
511"checksum tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7e91405c14320e5c79b3d148e1c86f40749a36e490642202a31689cb1a3452b2"
512"checksum text_unit 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "158bb1c22b638b1da3c95a8ad9f061ea40d4d39fd0301be3a520f92efeeb189e"
513"checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86"
514"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
515"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
516"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
517"checksum wait-timeout 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b9f3bf741a801531993db6478b95682117471f76916f5e690dd8d45395b09349"
518"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
519"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
520"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/crates/ra_syntax/fuzz/fuzz_targets/parser.rs b/crates/ra_syntax/fuzz/fuzz_targets/parser.rs
index 396c0ecaf..4667d5579 100644
--- a/crates/ra_syntax/fuzz/fuzz_targets/parser.rs
+++ b/crates/ra_syntax/fuzz/fuzz_targets/parser.rs
@@ -4,6 +4,6 @@ extern crate ra_syntax;
4 4
5fuzz_target!(|data: &[u8]| { 5fuzz_target!(|data: &[u8]| {
6 if let Ok(text) = std::str::from_utf8(data) { 6 if let Ok(text) = std::str::from_utf8(data) {
7 ra_syntax::utils::check_fuzz_invariants(text) 7 ra_syntax::check_fuzz_invariants(text)
8 } 8 }
9}); 9});
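
For reference, `check_fuzz_invariants` is now a plain public function at the crate root, so the same invariant check can be driven from an ordinary test as well as from the fuzz target. A minimal sketch (a hypothetical test, not part of this change; it assumes only the public `ra_syntax::check_fuzz_invariants(&str)` shown below in the lib.rs diff):

    // Hypothetical smoke test reusing the fuzz invariant check.
    #[test]
    fn parser_does_not_panic_on_garbage() {
        // Any input is fine: the check only asserts tree invariants, it does
        // not require the text to parse cleanly.
        ra_syntax::check_fuzz_invariants("fn foo( { ]");
    }
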
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 99b0983b0..e2b4f0388 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -2,7 +2,7 @@ pub mod visit;
2 2
3use rowan::TransparentNewType; 3use rowan::TransparentNewType;
4 4
5use crate::{SyntaxNode, TextRange, TextUnit, AstNode}; 5use crate::{SyntaxNode, TextRange, TextUnit, AstNode, Direction};
6 6
7pub use rowan::LeafAtOffset; 7pub use rowan::LeafAtOffset;
8 8
@@ -29,10 +29,16 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
29 find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast)) 29 find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast))
30} 30}
31 31
32/// Finds the first sibling in the given direction which is not `trivia`
33pub fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
34 node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia())
35}
36
32pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode { 37pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode {
33 SyntaxNode::from_repr(root.0.covering_node(range)) 38 SyntaxNode::from_repr(root.0.covering_node(range))
34} 39}
35 40
41// Replace with `std::iter::successors` in `1.34.0`
36pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> { 42pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
37 ::itertools::unfold(seed, move |slot| { 43 ::itertools::unfold(seed, move |slot| {
38 slot.take().map(|curr| { 44 slot.take().map(|curr| {
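
The `std::iter::successors` note above refers to the standard-library helper with the same shape as `generate`. A sketch of what that swap could look like once the minimum toolchain reaches 1.34.0 (an illustration, not part of this change):

    // `std::iter::successors` yields `seed`, then keeps applying `step` to the
    // previous item until it returns `None` — exactly what `generate` does today.
    fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
        std::iter::successors(seed, move |prev| step(prev))
    }
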
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index 62641c9fe..20e0a6856 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -1,3 +1,4 @@
1//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
1mod generated; 2mod generated;
2 3
3use std::marker::PhantomData; 4use std::marker::PhantomData;
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index b12282b39..e7d402446 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -1,22 +1,24 @@
1//! An experimental implementation of [Rust RFC#2256 libsyntax2.0][rfc#2256]. 1//! Syntax Tree library used throughout the rust analyzer.
2//! 2//!
3//! The intent is to be an IDE-ready parser, i.e. one that offers 3//! Properties:
4//! - easy and fast incremental re-parsing
5//! - graceful handling of errors
6//! - full-fidelity representation (*any* text can be precisely represented as
7//! a syntax tree)
4//! 8//!
5//! - easy and fast incremental re-parsing, 9//! For more information, see the [RFC]. Current implementation is inspired by
6//! - graceful handling of errors, and 10//! the [Swift] one.
7//! - maintains all information in the source file.
8//! 11//!
9//! For more information, see [the RFC][rfc#2265], or [the working draft][RFC.md]. 12//! The most interesting modules here are `syntax_node` (which defines concrete
13//! syntax tree) and `ast` (which defines abstract syntax tree on top of the
14//! CST). The actual parser lives in a separate `ra_parser` crate, though the
15//! lexer lives in this crate.
10//! 16//!
11//! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256> 17//! See `api_walkthrough` test in this file for a quick API tour!
12//! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md> 18//!
13 19//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
14#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)] 20//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
15#![deny(bad_style, missing_docs)]
16#![allow(missing_docs)]
17//#![warn(unreachable_pub)] // rust-lang/rust#47816
18 21
19mod syntax_kinds;
20mod syntax_node; 22mod syntax_node;
21mod syntax_text; 23mod syntax_text;
22mod syntax_error; 24mod syntax_error;
@@ -27,13 +29,11 @@ mod ptr;
27 29
28pub mod algo; 30pub mod algo;
29pub mod ast; 31pub mod ast;
30/// Utilities for simple uses of the parser.
31pub mod utils;
32 32
33pub use rowan::{SmolStr, TextRange, TextUnit}; 33pub use rowan::{SmolStr, TextRange, TextUnit};
34pub use ra_parser::SyntaxKind;
34pub use crate::{ 35pub use crate::{
35 ast::AstNode, 36 ast::AstNode,
36 syntax_kinds::SyntaxKind,
37 syntax_error::{SyntaxError, SyntaxErrorKind, Location}, 37 syntax_error::{SyntaxError, SyntaxErrorKind, Location},
38 syntax_text::SyntaxText, 38 syntax_text::SyntaxText,
39 syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc}, 39 syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc},
@@ -51,7 +51,7 @@ impl SourceFile {
51 fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SourceFile> { 51 fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SourceFile> {
52 let root = SyntaxNode::new(green, errors); 52 let root = SyntaxNode::new(green, errors);
53 if cfg!(debug_assertions) { 53 if cfg!(debug_assertions) {
54 utils::validate_block_structure(&root); 54 validation::validate_block_structure(&root);
55 } 55 }
56 assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); 56 assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
57 TreeArc::cast(root) 57 TreeArc::cast(root)
@@ -82,3 +82,181 @@ impl SourceFile {
82 errors 82 errors
83 } 83 }
84} 84}
85
86pub fn check_fuzz_invariants(text: &str) {
87 let file = SourceFile::parse(text);
88 let root = file.syntax();
89 validation::validate_block_structure(root);
90 let _ = file.errors();
91}
92
93/// This test does not assert anything and instead just shows off the crate's
94/// API.
95#[test]
96fn api_walkthrough() {
97 use ast::{ModuleItemOwner, NameOwner};
98
99 let source_code = "
100 fn foo() {
101 1 + 1
102 }
103 ";
104 // `SourceFile` is the main entry point.
105 //
106 // Note how `parse` does not return a `Result`: even completely invalid
107 // source code might be parsed.
108 let file = SourceFile::parse(source_code);
109
110 // Due to the way ownership is set up, owned syntax Nodes always live behind
111 // a `TreeArc` smart pointer. `TreeArc` is roughly an `std::sync::Arc` which
112 // points to the whole file instead of an individual node.
113 let file: TreeArc<SourceFile> = file;
114
115 // `SourceFile` is the root of the syntax tree. We can iterate file's items:
116 let mut func = None;
117 for item in file.items() {
118 match item.kind() {
119 ast::ModuleItemKind::FnDef(f) => func = Some(f),
120 _ => unreachable!(),
121 }
122 }
123 // The returned items are always references.
124 let func: &ast::FnDef = func.unwrap();
125
126 // All nodes implement `ToOwned` trait, with `Owned = TreeArc<Self>`.
127 // `to_owned` is a cheap operation: atomic increment.
128 let _owned_func: TreeArc<ast::FnDef> = func.to_owned();
129
130 // Each AST node has a bunch of getters for children. All getters return
131 // `Option`s though, to account for incomplete code. Some getters are common
132 // for several kinds of node. In this case, a trait like `ast::NameOwner`
133 // usually exists. By convention, all ast types should be used with `ast::`
134 // qualifier.
135 let name: Option<&ast::Name> = func.name();
136 let name = name.unwrap();
137 assert_eq!(name.text(), "foo");
138
139 // Let's get the `1 + 1` expression!
140 let block: &ast::Block = func.body().unwrap();
141 let expr: &ast::Expr = block.expr().unwrap();
142
143 // "Enum"-like nodes are represented using the "kind" pattern. It allows us
144 // to match exhaustively against all flavors of nodes, while maintaining
145 // internal representation flexibility. The drawback is that one can't write
146 // nested matches as one pattern.
147 let bin_expr: &ast::BinExpr = match expr.kind() {
148 ast::ExprKind::BinExpr(e) => e,
149 _ => unreachable!(),
150 };
151
152 // Besides the "typed" AST API, there's an untyped CST one as well.
153 // To switch from AST to CST, call `.syntax()` method:
154 let expr_syntax: &SyntaxNode = expr.syntax();
155
156 // Note how `expr` and `bin_expr` are in fact the same node underneath:
157 assert!(std::ptr::eq(expr_syntax, bin_expr.syntax()));
158
159 // To go from CST to AST, `AstNode::cast` function is used:
160 let expr = match ast::Expr::cast(expr_syntax) {
161 Some(e) => e,
162 None => unreachable!(),
163 };
164
165 // Note how expr is also a reference!
166 let expr: &ast::Expr = expr;
167
168 // This is possible because the underlying representation is the same:
169 assert_eq!(
170 expr as *const ast::Expr as *const u8,
171 expr_syntax as *const SyntaxNode as *const u8
172 );
173
174 // The two properties each syntax node has are a `SyntaxKind`:
175 assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
176
177 // And text range:
178 assert_eq!(expr_syntax.range(), TextRange::from_to(32.into(), 37.into()));
179
180 // You can get node's text as a `SyntaxText` object, which will traverse the
181 // tree collecting token's text:
182 let text: SyntaxText<'_> = expr_syntax.text();
183 assert_eq!(text.to_string(), "1 + 1");
184
185 // There's a bunch of traversal methods on `SyntaxNode`:
186 assert_eq!(expr_syntax.parent(), Some(block.syntax()));
187 assert_eq!(block.syntax().first_child().map(|it| it.kind()), Some(SyntaxKind::L_CURLY));
188 assert_eq!(expr_syntax.next_sibling().map(|it| it.kind()), Some(SyntaxKind::WHITESPACE));
189
190 // As well as some iterator helpers:
191 let f = expr_syntax.ancestors().find_map(ast::FnDef::cast);
192 assert_eq!(f, Some(&*func));
193 assert!(expr_syntax.siblings(Direction::Next).any(|it| it.kind() == SyntaxKind::R_CURLY));
194 assert_eq!(
195 expr_syntax.descendants().count(),
196 8, // 5 tokens `1`, ` `, `+`, ` `, `1`
197 // 2 child literal expressions: `1`, `1`
198 // 1 the node itself: `1 + 1`
199 );
200
201 // There's also a `preorder` method with a more fine-grained iteration control:
202 let mut buf = String::new();
203 let mut indent = 0;
204 for event in expr_syntax.preorder() {
205 match event {
206 WalkEvent::Enter(node) => {
207 buf += &format!(
208 "{:indent$}{:?} {:?}\n",
209 " ",
210 node.text(),
211 node.kind(),
212 indent = indent
213 );
214 indent += 2;
215 }
216 WalkEvent::Leave(_) => indent -= 2,
217 }
218 }
219 assert_eq!(indent, 0);
220 assert_eq!(
221 buf.trim(),
222 r#"
223"1 + 1" BIN_EXPR
224 "1" LITERAL
225 "1" INT_NUMBER
226 " " WHITESPACE
227 "+" PLUS
228 " " WHITESPACE
229 "1" LITERAL
230 "1" INT_NUMBER
231"#
232 .trim()
233 );
234
235 // To recursively process the tree, there are three approaches:
236 // 1. explicitly call getter methods on AST nodes.
237 // 2. use descendants and `AstNode::cast`.
238 // 3. use descendants and the visitor.
239 //
240 // Here's what the second one looks like:
241 let exprs_cast: Vec<String> = file
242 .syntax()
243 .descendants()
244 .filter_map(ast::Expr::cast)
245 .map(|expr| expr.syntax().text().to_string())
246 .collect();
247
248 // An alternative is to use a visitor. The visitor does not do traversal
249 // automatically (so it's more akin to a generic lambda) and is constructed
250 // from closures. This seems more flexible than a single generated visitor
251 // trait.
252 use algo::visit::{visitor, Visitor};
253 let mut exprs_visit = Vec::new();
254 for node in file.syntax().descendants() {
255 if let Some(result) =
256 visitor().visit::<ast::Expr, _>(|expr| expr.syntax().text().to_string()).accept(node)
257 {
258 exprs_visit.push(result);
259 }
260 }
261 assert_eq!(exprs_cast, exprs_visit);
262}
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs
index 138d1394a..cf573801c 100644
--- a/crates/ra_syntax/src/parsing.rs
+++ b/crates/ra_syntax/src/parsing.rs
@@ -1,78 +1,28 @@
1#[macro_use] 1//! Lexing, bridging to ra_parser (which does the actual parsing) and
2mod token_set; 2//! incremental reparsing.
3mod builder; 3
4mod lexer; 4mod lexer;
5mod event;
6mod input; 5mod input;
7mod parser; 6mod builder;
8mod grammar;
9mod reparsing; 7mod reparsing;
10 8
11use crate::{ 9use crate::{
12 SyntaxKind, SmolStr, SyntaxError, 10 SyntaxError,
11 syntax_node::GreenNode,
13 parsing::{ 12 parsing::{
14 builder::GreenBuilder, 13 builder::TreeBuilder,
15 input::ParserInput, 14 input::ParserInput,
16 event::EventProcessor,
17 parser::Parser,
18 }, 15 },
19 syntax_node::GreenNode,
20}; 16};
21 17
22pub use self::lexer::{tokenize, Token}; 18pub use self::lexer::{tokenize, Token};
23 19
24#[derive(Debug, Clone, PartialEq, Eq, Hash)]
25pub struct ParseError(pub String);
26
27pub(crate) use self::reparsing::incremental_reparse; 20pub(crate) use self::reparsing::incremental_reparse;
28 21
29pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { 22pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
30 let tokens = tokenize(&text); 23 let tokens = tokenize(&text);
31 parse_with(GreenBuilder::default(), text, &tokens, grammar::root) 24 let token_source = ParserInput::new(text, &tokens);
32} 25 let mut tree_sink = TreeBuilder::new(text, &tokens);
33 26 ra_parser::parse(&token_source, &mut tree_sink);
34fn parse_with<S: TreeSink>( 27 tree_sink.finish()
35 tree_sink: S,
36 text: &str,
37 tokens: &[Token],
38 f: fn(&mut Parser),
39) -> S::Tree {
40 let mut events = {
41 let input = ParserInput::new(text, &tokens);
42 let mut p = Parser::new(&input);
43 f(&mut p);
44 p.finish()
45 };
46 EventProcessor::new(tree_sink, text, tokens, &mut events).process().finish()
47}
48
49/// `TreeSink` abstracts details of a particular syntax tree implementation.
50trait TreeSink {
51 type Tree;
52
53 /// Adds new leaf to the current branch.
54 fn leaf(&mut self, kind: SyntaxKind, text: SmolStr);
55
56 /// Start new branch and make it current.
57 fn start_branch(&mut self, kind: SyntaxKind);
58
59 /// Finish current branch and restore previous
60 /// branch as current.
61 fn finish_branch(&mut self);
62
63 fn error(&mut self, error: ParseError);
64
65 /// Complete tree building. Make sure that
66 /// `start_branch` and `finish_branch` calls
67 /// are paired!
68 fn finish(self) -> Self::Tree;
69}
70
71/// `TokenSource` abstracts the source of the tokens parser operates one.
72///
73/// Hopefully this will allow us to treat text and token trees in the same way!
74trait TokenSource {
75 fn token_kind(&self, pos: usize) -> SyntaxKind;
76 fn is_token_joint_to_next(&self, pos: usize) -> bool;
77 fn is_keyword(&self, pos: usize, kw: &str) -> bool;
78} 28}
diff --git a/crates/ra_syntax/src/parsing/builder.rs b/crates/ra_syntax/src/parsing/builder.rs
index ee0e2cce7..cfe3139b8 100644
--- a/crates/ra_syntax/src/parsing/builder.rs
+++ b/crates/ra_syntax/src/parsing/builder.rs
@@ -1,49 +1,170 @@
1use std::mem;
2
3use ra_parser::{TreeSink, ParseError};
4use rowan::GreenNodeBuilder;
5
1use crate::{ 6use crate::{
2 SmolStr, SyntaxKind, SyntaxError, SyntaxErrorKind, TextUnit, 7 SmolStr, SyntaxError, SyntaxErrorKind, TextUnit, TextRange,
3 parsing::{TreeSink, ParseError}, 8 SyntaxKind::{self, *},
9 parsing::Token,
4 syntax_node::{GreenNode, RaTypes}, 10 syntax_node::{GreenNode, RaTypes},
5}; 11};
6 12
7use rowan::GreenNodeBuilder; 13/// Bridges the parser with our specific syntax tree representation.
8 14///
9pub(crate) struct GreenBuilder { 15/// `TreeBuilder` also handles attachment of trivia (whitespace) to nodes.
16pub(crate) struct TreeBuilder<'a> {
17 text: &'a str,
18 tokens: &'a [Token],
10 text_pos: TextUnit, 19 text_pos: TextUnit,
20 token_pos: usize,
21 state: State,
11 errors: Vec<SyntaxError>, 22 errors: Vec<SyntaxError>,
12 inner: GreenNodeBuilder<RaTypes>, 23 inner: GreenNodeBuilder<RaTypes>,
13} 24}
14 25
15impl Default for GreenBuilder { 26enum State {
16 fn default() -> GreenBuilder { 27 PendingStart,
17 GreenBuilder { 28 Normal,
18 text_pos: TextUnit::default(), 29 PendingFinish,
19 errors: Vec::new(),
20 inner: GreenNodeBuilder::new(),
21 }
22 }
23} 30}
24 31
25impl TreeSink for GreenBuilder { 32impl<'a> TreeSink for TreeBuilder<'a> {
26 type Tree = (GreenNode, Vec<SyntaxError>); 33 fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
27 34 match mem::replace(&mut self.state, State::Normal) {
28 fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) { 35 State::PendingStart => unreachable!(),
29 self.text_pos += TextUnit::of_str(text.as_str()); 36 State::PendingFinish => self.inner.finish_internal(),
30 self.inner.leaf(kind, text); 37 State::Normal => (),
38 }
39 self.eat_trivias();
40 let n_tokens = n_tokens as usize;
41 let len = self.tokens[self.token_pos..self.token_pos + n_tokens]
42 .iter()
43 .map(|it| it.len)
44 .sum::<TextUnit>();
45 self.do_leaf(kind, len, n_tokens);
31 } 46 }
32 47
33 fn start_branch(&mut self, kind: SyntaxKind) { 48 fn start_branch(&mut self, kind: SyntaxKind) {
34 self.inner.start_internal(kind) 49 match mem::replace(&mut self.state, State::Normal) {
50 State::PendingStart => {
51 self.inner.start_internal(kind);
52 // No need to attach trivias to previous node: there is no
53 // previous node.
54 return;
55 }
56 State::PendingFinish => self.inner.finish_internal(),
57 State::Normal => (),
58 }
59
60 let n_trivias =
61 self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
62 let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
63 let mut trivia_end =
64 self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>();
65
66 let n_attached_trivias = {
67 let leading_trivias = leading_trivias.iter().rev().map(|it| {
68 let next_end = trivia_end - it.len;
69 let range = TextRange::from_to(next_end, trivia_end);
70 trivia_end = next_end;
71 (it.kind, &self.text[range])
72 });
73 n_attached_trivias(kind, leading_trivias)
74 };
75 self.eat_n_trivias(n_trivias - n_attached_trivias);
76 self.inner.start_internal(kind);
77 self.eat_n_trivias(n_attached_trivias);
35 } 78 }
36 79
37 fn finish_branch(&mut self) { 80 fn finish_branch(&mut self) {
38 self.inner.finish_internal(); 81 match mem::replace(&mut self.state, State::PendingFinish) {
82 State::PendingStart => unreachable!(),
83 State::PendingFinish => self.inner.finish_internal(),
84 State::Normal => (),
85 }
39 } 86 }
40 87
41 fn error(&mut self, error: ParseError) { 88 fn error(&mut self, error: ParseError) {
42 let error = SyntaxError::new(SyntaxErrorKind::ParseError(error), self.text_pos); 89 let error = SyntaxError::new(SyntaxErrorKind::ParseError(error), self.text_pos);
43 self.errors.push(error) 90 self.errors.push(error)
44 } 91 }
92}
93
94impl<'a> TreeBuilder<'a> {
95 pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> TreeBuilder<'a> {
96 TreeBuilder {
97 text,
98 tokens,
99 text_pos: 0.into(),
100 token_pos: 0,
101 state: State::PendingStart,
102 errors: Vec::new(),
103 inner: GreenNodeBuilder::new(),
104 }
105 }
106
107 pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
108 match mem::replace(&mut self.state, State::Normal) {
109 State::PendingFinish => {
110 self.eat_trivias();
111 self.inner.finish_internal()
112 }
113 State::PendingStart | State::Normal => unreachable!(),
114 }
45 115
46 fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
47 (self.inner.finish(), self.errors) 116 (self.inner.finish(), self.errors)
48 } 117 }
118
119 fn eat_trivias(&mut self) {
120 while let Some(&token) = self.tokens.get(self.token_pos) {
121 if !token.kind.is_trivia() {
122 break;
123 }
124 self.do_leaf(token.kind, token.len, 1);
125 }
126 }
127
128 fn eat_n_trivias(&mut self, n: usize) {
129 for _ in 0..n {
130 let token = self.tokens[self.token_pos];
131 assert!(token.kind.is_trivia());
132 self.do_leaf(token.kind, token.len, 1);
133 }
134 }
135
136 fn do_leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) {
137 let range = TextRange::offset_len(self.text_pos, len);
138 let text: SmolStr = self.text[range].into();
139 self.text_pos += len;
140 self.token_pos += n_tokens;
141 self.inner.leaf(kind, text);
142 }
143}
144
145fn n_attached_trivias<'a>(
146 kind: SyntaxKind,
147 trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
148) -> usize {
149 match kind {
150 CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF
151 | MODULE | NAMED_FIELD_DEF => {
152 let mut res = 0;
153 for (i, (kind, text)) in trivias.enumerate() {
154 match kind {
155 WHITESPACE => {
156 if text.contains("\n\n") {
157 break;
158 }
159 }
160 COMMENT => {
161 res = i + 1;
162 }
163 _ => (),
164 }
165 }
166 res
167 }
168 _ => 0,
169 }
49} 170}
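
The `TreeSink` trait implemented by `TreeBuilder` above is what keeps `ra_parser` ignorant of the concrete tree representation: the parser only reports leaves, branches and errors. As an illustration (a sketch, not part of this change, assuming only the trait methods visible in this diff), any other sink could be handed to `ra_parser::parse`, for example one that merely counts events:

    use ra_parser::{ParseError, SyntaxKind, TreeSink};

    /// Toy sink: instead of building a tree, tally what the parser emits.
    #[derive(Default)]
    struct CountingSink {
        leaves: usize,
        branches: usize,
        errors: usize,
    }

    impl TreeSink for CountingSink {
        fn leaf(&mut self, _kind: SyntaxKind, _n_tokens: u8) {
            self.leaves += 1;
        }
        fn start_branch(&mut self, _kind: SyntaxKind) {
            self.branches += 1;
        }
        fn finish_branch(&mut self) {}
        fn error(&mut self, _error: ParseError) {
            self.errors += 1;
        }
    }
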
diff --git a/crates/ra_syntax/src/parsing/event.rs b/crates/ra_syntax/src/parsing/event.rs
deleted file mode 100644
index f6f020eab..000000000
--- a/crates/ra_syntax/src/parsing/event.rs
+++ /dev/null
@@ -1,247 +0,0 @@
1//! This module provides a way to construct a `File`.
2//! It is intended to be completely decoupled from the
3//! parser, so as to allow to evolve the tree representation
4//! and the parser algorithm independently.
5//!
6//! The `TreeSink` trait is the bridge between the parser and the
7//! tree builder: the parser produces a stream of events like
8//! `start node`, `finish node`, and `FileBuilder` converts
9//! this stream to a real tree.
10use std::mem;
11
12use crate::{
13 SmolStr,
14 SyntaxKind::{self, *},
15 TextRange, TextUnit,
16 parsing::{
17 ParseError, TreeSink,
18 lexer::Token,
19 },
20};
21
22/// `Parser` produces a flat list of `Event`s.
23/// They are converted to a tree-structure in
24/// a separate pass, via `TreeBuilder`.
25#[derive(Debug)]
26pub(crate) enum Event {
27 /// This event signifies the start of the node.
28 /// It should be either abandoned (in which case the
29 /// `kind` is `TOMBSTONE`, and the event is ignored),
30 /// or completed via a `Finish` event.
31 ///
32 /// All tokens between a `Start` and a `Finish` would
33 /// become the children of the respective node.
34 ///
35 /// For left-recursive syntactic constructs, the parser produces
36 /// a child node before it sees a parent. `forward_parent`
37 /// saves the position of current event's parent.
38 ///
39 /// Consider this path
40 ///
41 /// foo::bar
42 ///
43 /// The events for it would look like this:
44 ///
45 ///
46 /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH
47 /// | /\
48 /// | |
49 /// +------forward-parent------+
50 ///
51 /// And the tree would look like this
52 ///
53 /// +--PATH---------+
54 /// | | |
55 /// | | |
56 /// | '::' 'bar'
57 /// |
58 /// PATH
59 /// |
60 /// 'foo'
61 ///
62 /// See also `CompletedMarker::precede`.
63 Start {
64 kind: SyntaxKind,
65 forward_parent: Option<u32>,
66 },
67
68 /// Complete the previous `Start` event
69 Finish,
70
71 /// Produce a single leaf-element.
72 /// `n_raw_tokens` is used to glue complex contextual tokens.
73 /// For example, lexer tokenizes `>>` as `>`, `>`, and
74 /// `n_raw_tokens = 2` is used to produced a single `>>`.
75 Token {
76 kind: SyntaxKind,
77 n_raw_tokens: u8,
78 },
79
80 Error {
81 msg: ParseError,
82 },
83}
84
85impl Event {
86 pub(crate) fn tombstone() -> Self {
87 Event::Start { kind: TOMBSTONE, forward_parent: None }
88 }
89}
90
91pub(super) struct EventProcessor<'a, S: TreeSink> {
92 sink: S,
93 text_pos: TextUnit,
94 text: &'a str,
95 token_pos: usize,
96 tokens: &'a [Token],
97 events: &'a mut [Event],
98}
99
100impl<'a, S: TreeSink> EventProcessor<'a, S> {
101 pub(super) fn new(
102 sink: S,
103 text: &'a str,
104 tokens: &'a [Token],
105 events: &'a mut [Event],
106 ) -> EventProcessor<'a, S> {
107 EventProcessor { sink, text_pos: 0.into(), text, token_pos: 0, tokens, events }
108 }
109
110 /// Generate the syntax tree with the control of events.
111 pub(crate) fn process(mut self) -> S {
112 let mut forward_parents = Vec::new();
113
114 for i in 0..self.events.len() {
115 match mem::replace(&mut self.events[i], Event::tombstone()) {
116 Event::Start { kind: TOMBSTONE, .. } => (),
117
118 Event::Start { kind, forward_parent } => {
119 // For events[A, B, C], B is A's forward_parent, C is B's forward_parent,
120 // in the normal control flow, the parent-child relation: `A -> B -> C`,
121 // while with the magic forward_parent, it writes: `C <- B <- A`.
122
123 // append `A` into parents.
124 forward_parents.push(kind);
125 let mut idx = i;
126 let mut fp = forward_parent;
127 while let Some(fwd) = fp {
128 idx += fwd as usize;
129 // append `A`'s forward_parent `B`
130 fp = match mem::replace(&mut self.events[idx], Event::tombstone()) {
131 Event::Start { kind, forward_parent } => {
132 forward_parents.push(kind);
133 forward_parent
134 }
135 _ => unreachable!(),
136 };
137 // append `B`'s forward_parent `C` in the next stage.
138 }
139
140 for kind in forward_parents.drain(..).rev() {
141 self.start(kind);
142 }
143 }
144 Event::Finish => {
145 let is_last = i == self.events.len() - 1;
146 self.finish(is_last);
147 }
148 Event::Token { kind, n_raw_tokens } => {
149 self.eat_trivias();
150 let n_raw_tokens = n_raw_tokens as usize;
151 let len = self.tokens[self.token_pos..self.token_pos + n_raw_tokens]
152 .iter()
153 .map(|it| it.len)
154 .sum::<TextUnit>();
155 self.leaf(kind, len, n_raw_tokens);
156 }
157 Event::Error { msg } => self.sink.error(msg),
158 }
159 }
160 self.sink
161 }
162
163 /// Add the node into syntax tree but discard the comments/whitespaces.
164 fn start(&mut self, kind: SyntaxKind) {
165 if kind == SOURCE_FILE {
166 self.sink.start_branch(kind);
167 return;
168 }
169 let n_trivias =
170 self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
171 let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
172 let mut trivia_end =
173 self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>();
174
175 let n_attached_trivias = {
176 let leading_trivias = leading_trivias.iter().rev().map(|it| {
177 let next_end = trivia_end - it.len;
178 let range = TextRange::from_to(next_end, trivia_end);
179 trivia_end = next_end;
180 (it.kind, &self.text[range])
181 });
182 n_attached_trivias(kind, leading_trivias)
183 };
184 self.eat_n_trivias(n_trivias - n_attached_trivias);
185 self.sink.start_branch(kind);
186 self.eat_n_trivias(n_attached_trivias);
187 }
188
189 fn finish(&mut self, is_last: bool) {
190 if is_last {
191 self.eat_trivias()
192 }
193 self.sink.finish_branch();
194 }
195
196 fn eat_trivias(&mut self) {
197 while let Some(&token) = self.tokens.get(self.token_pos) {
198 if !token.kind.is_trivia() {
199 break;
200 }
201 self.leaf(token.kind, token.len, 1);
202 }
203 }
204
205 fn eat_n_trivias(&mut self, n: usize) {
206 for _ in 0..n {
207 let token = self.tokens[self.token_pos];
208 assert!(token.kind.is_trivia());
209 self.leaf(token.kind, token.len, 1);
210 }
211 }
212
213 fn leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) {
214 let range = TextRange::offset_len(self.text_pos, len);
215 let text: SmolStr = self.text[range].into();
216 self.text_pos += len;
217 self.token_pos += n_tokens;
218 self.sink.leaf(kind, text);
219 }
220}
221
222fn n_attached_trivias<'a>(
223 kind: SyntaxKind,
224 trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
225) -> usize {
226 match kind {
227 CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF
228 | MODULE | NAMED_FIELD_DEF => {
229 let mut res = 0;
230 for (i, (kind, text)) in trivias.enumerate() {
231 match kind {
232 WHITESPACE => {
233 if text.contains("\n\n") {
234 break;
235 }
236 }
237 COMMENT => {
238 res = i + 1;
239 }
240 _ => (),
241 }
242 }
243 res
244 }
245 _ => 0,
246 }
247}
diff --git a/crates/ra_syntax/src/parsing/input.rs b/crates/ra_syntax/src/parsing/input.rs
index 96c03bb11..31c6a3b9b 100644
--- a/crates/ra_syntax/src/parsing/input.rs
+++ b/crates/ra_syntax/src/parsing/input.rs
@@ -1,11 +1,30 @@
1use ra_parser::TokenSource;
2
1use crate::{ 3use crate::{
2 SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit, 4 SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
3 parsing::{ 5 parsing::lexer::Token,
4 TokenSource,
5 lexer::Token,
6 },
7}; 6};
8 7
8pub(crate) struct ParserInput<'t> {
9 text: &'t str,
10 /// start position of each token (except whitespace and comments)
11 /// ```non-rust
12 /// struct Foo;
13 /// ^------^---
14 /// | | ^-
15 /// 0 7 10
16 /// ```
17 /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
18 start_offsets: Vec<TextUnit>,
19 /// non-whitespace/comment tokens
20 /// ```non-rust
21 /// struct Foo {}
22 /// ^^^^^^ ^^^ ^^
23 /// ```
24 /// tokens: `[struct, Foo, {, }]`
25 tokens: Vec<Token>,
26}
27
9impl<'t> TokenSource for ParserInput<'t> { 28impl<'t> TokenSource for ParserInput<'t> {
10 fn token_kind(&self, pos: usize) -> SyntaxKind { 29 fn token_kind(&self, pos: usize) -> SyntaxKind {
11 if !(pos < self.tokens.len()) { 30 if !(pos < self.tokens.len()) {
@@ -29,26 +48,6 @@ impl<'t> TokenSource for ParserInput<'t> {
29 } 48 }
30} 49}
31 50
32pub(crate) struct ParserInput<'t> {
33 text: &'t str,
34 /// start position of each token(expect whitespace and comment)
35 /// ```non-rust
36 /// struct Foo;
37 /// ^------^---
38 /// | | ^-
39 /// 0 7 10
40 /// ```
41 /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
42 start_offsets: Vec<TextUnit>,
43 /// non-whitespace/comment tokens
44 /// ```non-rust
45 /// struct Foo {}
46 /// ^^^^^^ ^^^ ^^
47 /// ```
48 /// tokens: `[struct, Foo, {, }]`
49 tokens: Vec<Token>,
50}
51
52impl<'t> ParserInput<'t> { 51impl<'t> ParserInput<'t> {
53 /// Generate input from tokens(expect comment and whitespace). 52 /// Generate input from tokens(expect comment and whitespace).
54 pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> { 53 pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index f2d218ab9..19d8adcfb 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -1,18 +1,25 @@
1//! Implementation of incremental re-parsing.
2//!
3//! We use two simple strategies for this:
4//! - if the edit modifies only a single token (like changing an identifier's
5//! letter), we replace only this token.
6//! - otherwise, we search for the nearest `{}` block which contains the edit
7//! and try to parse only this block.
8
9use ra_text_edit::AtomTextEdit;
10use ra_parser::Reparser;
11
1use crate::{ 12use crate::{
2 SyntaxKind::*, TextRange, TextUnit, 13 SyntaxKind::*, TextRange, TextUnit, SyntaxError,
3 algo, 14 algo,
4 syntax_node::{GreenNode, SyntaxNode}, 15 syntax_node::{GreenNode, SyntaxNode},
5 syntax_error::SyntaxError,
6 parsing::{ 16 parsing::{
7 grammar, parse_with, 17 input::ParserInput,
8 builder::GreenBuilder, 18 builder::TreeBuilder,
9 parser::Parser,
10 lexer::{tokenize, Token}, 19 lexer::{tokenize, Token},
11 } 20 }
12}; 21};
13 22
14use ra_text_edit::AtomTextEdit;
15
16pub(crate) fn incremental_reparse( 23pub(crate) fn incremental_reparse(
17 node: &SyntaxNode, 24 node: &SyntaxNode,
18 edit: &AtomTextEdit, 25 edit: &AtomTextEdit,
@@ -61,7 +68,10 @@ fn reparse_block<'node>(
61 if !is_balanced(&tokens) { 68 if !is_balanced(&tokens) {
62 return None; 69 return None;
63 } 70 }
64 let (green, new_errors) = parse_with(GreenBuilder::default(), &text, &tokens, reparser); 71 let token_source = ParserInput::new(&text, &tokens);
72 let mut tree_sink = TreeBuilder::new(&text, &tokens);
73 reparser.parse(&token_source, &mut tree_sink);
74 let (green, new_errors) = tree_sink.finish();
65 Some((node, green, new_errors)) 75 Some((node, green, new_errors))
66} 76}
67 77
@@ -77,12 +87,13 @@ fn is_contextual_kw(text: &str) -> bool {
77 } 87 }
78} 88}
79 89
80fn find_reparsable_node( 90fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> {
81 node: &SyntaxNode,
82 range: TextRange,
83) -> Option<(&SyntaxNode, fn(&mut Parser))> {
84 let node = algo::find_covering_node(node, range); 91 let node = algo::find_covering_node(node, range);
85 node.ancestors().find_map(|node| grammar::reparser(node).map(|r| (node, r))) 92 node.ancestors().find_map(|node| {
93 let first_child = node.first_child().map(|it| it.kind());
94 let parent = node.parent().map(|it| it.kind());
95 Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
96 })
86} 97}
87 98
88fn is_balanced(tokens: &[Token]) -> bool { 99fn is_balanced(tokens: &[Token]) -> bool {
@@ -132,7 +143,7 @@ fn merge_errors(
132mod tests { 143mod tests {
133 use test_utils::{extract_range, assert_eq_text}; 144 use test_utils::{extract_range, assert_eq_text};
134 145
135 use crate::{SourceFile, AstNode, utils::dump_tree}; 146 use crate::{SourceFile, AstNode};
136 use super::*; 147 use super::*;
137 148
138 fn do_check<F>(before: &str, replace_with: &str, reparser: F) 149 fn do_check<F>(before: &str, replace_with: &str, reparser: F)
@@ -158,8 +169,8 @@ mod tests {
158 }; 169 };
159 170
160 assert_eq_text!( 171 assert_eq_text!(
161 &dump_tree(fully_reparsed.syntax()), 172 &fully_reparsed.syntax().debug_dump(),
162 &dump_tree(incrementally_reparsed.syntax()), 173 &incrementally_reparsed.syntax().debug_dump(),
163 ) 174 )
164 } 175 }
165 176
diff --git a/crates/ra_syntax/src/syntax_error.rs b/crates/ra_syntax/src/syntax_error.rs
index 1a00fcc27..bdd431742 100644
--- a/crates/ra_syntax/src/syntax_error.rs
+++ b/crates/ra_syntax/src/syntax_error.rs
@@ -1,6 +1,8 @@
1use std::fmt; 1use std::fmt;
2 2
3use crate::{TextRange, TextUnit, parsing::ParseError}; 3use ra_parser::ParseError;
4
5use crate::{TextRange, TextUnit};
4 6
5#[derive(Debug, Clone, PartialEq, Eq, Hash)] 7#[derive(Debug, Clone, PartialEq, Eq, Hash)]
6pub struct SyntaxError { 8pub struct SyntaxError {
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs
index aa627398d..4d54ae614 100644
--- a/crates/ra_syntax/src/syntax_node.rs
+++ b/crates/ra_syntax/src/syntax_node.rs
@@ -1,9 +1,20 @@
1use std::{fmt, borrow::Borrow}; 1//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer.
2//!
3//! The CST includes comments and whitespace, provides a single node type,
4//! `SyntaxNode`, and a basic traversal API (parent, children, siblings).
5//!
6//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
7//! modules just wraps its API.
8
9use std::{
10 fmt::{self, Write},
11 borrow::Borrow,
12};
2 13
3use rowan::{Types, TransparentNewType}; 14use rowan::{Types, TransparentNewType};
4 15
5use crate::{ 16use crate::{
6 SmolStr, SyntaxKind, TextRange, SyntaxText, 17 SmolStr, SyntaxKind, TextRange, SyntaxText, SourceFile, AstNode,
7 syntax_error::SyntaxError, 18 syntax_error::SyntaxError,
8}; 19};
9 20
@@ -16,14 +27,17 @@ impl Types for RaTypes {
16 type RootData = Vec<SyntaxError>; 27 type RootData = Vec<SyntaxError>;
17} 28}
18 29
19pub type GreenNode = rowan::GreenNode<RaTypes>; 30pub(crate) type GreenNode = rowan::GreenNode<RaTypes>;
31
32/// Marker trait for CST and AST nodes
33pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>> {}
34impl<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>> SyntaxNodeWrapper for T {}
20 35
36/// An owning smart pointer for CST or AST node.
21#[derive(PartialEq, Eq, Hash)] 37#[derive(PartialEq, Eq, Hash)]
22pub struct TreeArc<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>>( 38pub struct TreeArc<T: SyntaxNodeWrapper>(pub(crate) rowan::TreeArc<RaTypes, T>);
23 pub(crate) rowan::TreeArc<RaTypes, T>,
24);
25 39
26impl<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>> Borrow<T> for TreeArc<T> { 40impl<T: SyntaxNodeWrapper> Borrow<T> for TreeArc<T> {
27 fn borrow(&self) -> &T { 41 fn borrow(&self) -> &T {
28 &*self 42 &*self
29 } 43 }
@@ -31,11 +45,11 @@ impl<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>> Borrow<T> for Tre
31 45
32impl<T> TreeArc<T> 46impl<T> TreeArc<T>
33where 47where
34 T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, 48 T: SyntaxNodeWrapper,
35{ 49{
36 pub(crate) fn cast<U>(this: TreeArc<T>) -> TreeArc<U> 50 pub(crate) fn cast<U>(this: TreeArc<T>) -> TreeArc<U>
37 where 51 where
38 U: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, 52 U: SyntaxNodeWrapper,
39 { 53 {
40 TreeArc(rowan::TreeArc::cast(this.0)) 54 TreeArc(rowan::TreeArc::cast(this.0))
41 } 55 }
@@ -43,7 +57,7 @@ where
43 57
44impl<T> std::ops::Deref for TreeArc<T> 58impl<T> std::ops::Deref for TreeArc<T>
45where 59where
46 T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, 60 T: SyntaxNodeWrapper,
47{ 61{
48 type Target = T; 62 type Target = T;
49 fn deref(&self) -> &T { 63 fn deref(&self) -> &T {
@@ -53,7 +67,7 @@ where
53 67
54impl<T> PartialEq<T> for TreeArc<T> 68impl<T> PartialEq<T> for TreeArc<T>
55where 69where
56 T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, 70 T: SyntaxNodeWrapper,
57 T: PartialEq<T>, 71 T: PartialEq<T>,
58{ 72{
59 fn eq(&self, other: &T) -> bool { 73 fn eq(&self, other: &T) -> bool {
@@ -64,7 +78,7 @@ where
64 78
65impl<T> Clone for TreeArc<T> 79impl<T> Clone for TreeArc<T>
66where 80where
67 T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, 81 T: SyntaxNodeWrapper,
68{ 82{
69 fn clone(&self) -> TreeArc<T> { 83 fn clone(&self) -> TreeArc<T> {
70 TreeArc(self.0.clone()) 84 TreeArc(self.0.clone())
@@ -73,7 +87,7 @@ where
73 87
74impl<T> fmt::Debug for TreeArc<T> 88impl<T> fmt::Debug for TreeArc<T>
75where 89where
76 T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>, 90 T: SyntaxNodeWrapper,
77 T: fmt::Debug, 91 T: fmt::Debug,
78{ 92{
79 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { 93 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
@@ -88,13 +102,24 @@ unsafe impl TransparentNewType for SyntaxNode {
88 type Repr = rowan::SyntaxNode<RaTypes>; 102 type Repr = rowan::SyntaxNode<RaTypes>;
89} 103}
90 104
91impl SyntaxNode { 105impl ToOwned for SyntaxNode {
92 pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SyntaxNode> { 106 type Owned = TreeArc<SyntaxNode>;
93 let ptr = TreeArc(rowan::SyntaxNode::new(green, errors)); 107 fn to_owned(&self) -> TreeArc<SyntaxNode> {
108 let ptr = TreeArc(self.0.to_owned());
94 TreeArc::cast(ptr) 109 TreeArc::cast(ptr)
95 } 110 }
96} 111}
97 112
113impl fmt::Debug for SyntaxNode {
114 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
115 write!(fmt, "{:?}@{:?}", self.kind(), self.range())?;
116 if has_short_text(self.kind()) {
117 write!(fmt, " \"{}\"", self.text())?;
118 }
119 Ok(())
120 }
121}
122
98#[derive(Debug, Clone, Copy, PartialEq, Eq)] 123#[derive(Debug, Clone, Copy, PartialEq, Eq)]
99pub enum Direction { 124pub enum Direction {
100 Next, 125 Next,
@@ -102,48 +127,10 @@ pub enum Direction {
102} 127}
103 128
104impl SyntaxNode { 129impl SyntaxNode {
105 pub fn leaf_text(&self) -> Option<&SmolStr> { 130 pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SyntaxNode> {
106 self.0.leaf_text() 131 let ptr = TreeArc(rowan::SyntaxNode::new(green, errors));
107 }
108 pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> {
109 crate::algo::generate(Some(self), |&node| node.parent())
110 }
111 pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> {
112 self.preorder().filter_map(|event| match event {
113 WalkEvent::Enter(node) => Some(node),
114 WalkEvent::Leave(_) => None,
115 })
116 }
117 pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> {
118 crate::algo::generate(Some(self), move |&node| match direction {
119 Direction::Next => node.next_sibling(),
120 Direction::Prev => node.prev_sibling(),
121 })
122 }
123 pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> {
124 self.0.preorder().map(|event| match event {
125 WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)),
126 WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)),
127 })
128 }
129}
130
131impl ToOwned for SyntaxNode {
132 type Owned = TreeArc<SyntaxNode>;
133 fn to_owned(&self) -> TreeArc<SyntaxNode> {
134 let ptr = TreeArc(self.0.to_owned());
135 TreeArc::cast(ptr) 132 TreeArc::cast(ptr)
136 } 133 }
137}
138
139impl SyntaxNode {
140 pub(crate) fn root_data(&self) -> &Vec<SyntaxError> {
141 self.0.root_data()
142 }
143
144 pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
145 self.0.replace_self(replacement)
146 }
147 134
148 pub fn kind(&self) -> SyntaxKind { 135 pub fn kind(&self) -> SyntaxKind {
149 self.0.kind() 136 self.0.kind()
@@ -161,6 +148,10 @@ impl SyntaxNode {
161 self.0.is_leaf() 148 self.0.is_leaf()
162 } 149 }
163 150
151 pub fn leaf_text(&self) -> Option<&SmolStr> {
152 self.0.leaf_text()
153 }
154
164 pub fn parent(&self) -> Option<&SyntaxNode> { 155 pub fn parent(&self) -> Option<&SyntaxNode> {
165 self.0.parent().map(SyntaxNode::from_repr) 156 self.0.parent().map(SyntaxNode::from_repr)
166 } 157 }
@@ -185,18 +176,85 @@ impl SyntaxNode {
185 SyntaxNodeChildren(self.0.children()) 176 SyntaxNodeChildren(self.0.children())
186 } 177 }
187 178
179 pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> {
180 crate::algo::generate(Some(self), |&node| node.parent())
181 }
182
183 pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> {
184 self.preorder().filter_map(|event| match event {
185 WalkEvent::Enter(node) => Some(node),
186 WalkEvent::Leave(_) => None,
187 })
188 }
189
190 pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> {
191 crate::algo::generate(Some(self), move |&node| match direction {
192 Direction::Next => node.next_sibling(),
193 Direction::Prev => node.prev_sibling(),
194 })
195 }
196
197 pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> {
198 self.0.preorder().map(|event| match event {
199 WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)),
200 WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)),
201 })
202 }
203
188 pub fn memory_size_of_subtree(&self) -> usize { 204 pub fn memory_size_of_subtree(&self) -> usize {
189 self.0.memory_size_of_subtree() 205 self.0.memory_size_of_subtree()
190 } 206 }
191}
192 207
193impl fmt::Debug for SyntaxNode { 208 pub fn debug_dump(&self) -> String {
194 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { 209 let mut errors: Vec<_> = match self.ancestors().find_map(SourceFile::cast) {
195 write!(fmt, "{:?}@{:?}", self.kind(), self.range())?; 210 Some(file) => file.errors(),
196 if has_short_text(self.kind()) { 211 None => self.root_data().to_vec(),
197 write!(fmt, " \"{}\"", self.text())?; 212 };
213 errors.sort_by_key(|e| e.offset());
214 let mut err_pos = 0;
215 let mut level = 0;
216 let mut buf = String::new();
217 macro_rules! indent {
218 () => {
219 for _ in 0..level {
220 buf.push_str(" ");
221 }
222 };
198 } 223 }
199 Ok(()) 224
225 for event in self.preorder() {
226 match event {
227 WalkEvent::Enter(node) => {
228 indent!();
229 writeln!(buf, "{:?}", node).unwrap();
230 if node.first_child().is_none() {
231 let off = node.range().end();
232 while err_pos < errors.len() && errors[err_pos].offset() <= off {
233 indent!();
234 writeln!(buf, "err: `{}`", errors[err_pos]).unwrap();
235 err_pos += 1;
236 }
237 }
238 level += 1;
239 }
240 WalkEvent::Leave(_) => level -= 1,
241 }
242 }
243
244 assert_eq!(level, 0);
245 for err in errors[err_pos..].iter() {
246 writeln!(buf, "err: `{}`", err).unwrap();
247 }
248
249 buf
250 }
251
252 pub(crate) fn root_data(&self) -> &Vec<SyntaxError> {
253 self.0.root_data()
254 }
255
256 pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
257 self.0.replace_self(replacement)
200 258 }
201 259 }
202 260
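(For reference, a minimal usage sketch of the relocated dumper, assuming ra_syntax as a dependency and a made-up input string; only APIs visible in this diff are used.)

use ra_syntax::{AstNode, SourceFile};

fn main() {
    // Parse some text; syntax errors are collected on the tree itself.
    let file = SourceFile::parse("fn main() { 92 }");
    // SyntaxNode::debug_dump replaces the old utils::dump_tree free function:
    // one node per line, indented by depth, with errors interleaved after their leaf.
    println!("{}", file.syntax().debug_dump());
}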
diff --git a/crates/ra_syntax/src/utils.rs b/crates/ra_syntax/src/utils.rs
deleted file mode 100644
index 2e1b42da0..000000000
--- a/crates/ra_syntax/src/utils.rs
+++ /dev/null
@@ -1,83 +0,0 @@
1use std::{str, fmt::Write};
2
3use crate::{SourceFile, SyntaxKind, WalkEvent, AstNode, SyntaxNode};
4
5/// Parse a file and create a string representation of the resulting parse tree.
6pub fn dump_tree(syntax: &SyntaxNode) -> String {
7 let mut errors: Vec<_> = match syntax.ancestors().find_map(SourceFile::cast) {
8 Some(file) => file.errors(),
9 None => syntax.root_data().to_vec(),
10 };
11 errors.sort_by_key(|e| e.offset());
12 let mut err_pos = 0;
13 let mut level = 0;
14 let mut buf = String::new();
15 macro_rules! indent {
16 () => {
17 for _ in 0..level {
18 buf.push_str(" ");
19 }
20 };
21 }
22
23 for event in syntax.preorder() {
24 match event {
25 WalkEvent::Enter(node) => {
26 indent!();
27 writeln!(buf, "{:?}", node).unwrap();
28 if node.first_child().is_none() {
29 let off = node.range().end();
30 while err_pos < errors.len() && errors[err_pos].offset() <= off {
31 indent!();
32 writeln!(buf, "err: `{}`", errors[err_pos]).unwrap();
33 err_pos += 1;
34 }
35 }
36 level += 1;
37 }
38 WalkEvent::Leave(_) => level -= 1,
39 }
40 }
41
42 assert_eq!(level, 0);
43 for err in errors[err_pos..].iter() {
44 writeln!(buf, "err: `{}`", err).unwrap();
45 }
46
47 buf
48}
49
50pub fn check_fuzz_invariants(text: &str) {
51 let file = SourceFile::parse(text);
52 let root = file.syntax();
53 validate_block_structure(root);
54 let _ = file.errors();
55}
56
57pub(crate) fn validate_block_structure(root: &SyntaxNode) {
58 let mut stack = Vec::new();
59 for node in root.descendants() {
60 match node.kind() {
61 SyntaxKind::L_CURLY => stack.push(node),
62 SyntaxKind::R_CURLY => {
63 if let Some(pair) = stack.pop() {
64 assert_eq!(
65 node.parent(),
66 pair.parent(),
67 "\nunpaired curleys:\n{}\n{}\n",
68 root.text(),
69 dump_tree(root),
70 );
71 assert!(
72 node.next_sibling().is_none() && pair.prev_sibling().is_none(),
73 "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
74 node,
75 root.text(),
76 node.text(),
77 );
78 }
79 }
80 _ => (),
81 }
82 }
83}
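(check_fuzz_invariants is now reached from the ra_syntax crate root instead of ra_syntax::utils; a rough sketch of a fuzz target calling it, assuming the usual libfuzzer-sys harness rather than the exact contents of fuzz_targets/parser.rs.)

#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
extern crate ra_syntax;

fuzz_target!(|data: &[u8]| {
    if let Ok(text) = std::str::from_utf8(data) {
        // Parses the input and checks block-structure invariants; panics on violation.
        ra_syntax::check_fuzz_invariants(text);
    }
});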
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs
index 69958f0d7..69f344d65 100644
--- a/crates/ra_syntax/src/validation.rs
+++ b/crates/ra_syntax/src/validation.rs
@@ -5,7 +5,8 @@ mod string;
5 5 mod block;
6 6
7 7 use crate::{
8 SourceFile, SyntaxError, AstNode,
8 SourceFile, SyntaxError, AstNode, SyntaxNode,
9 SyntaxKind::{L_CURLY, R_CURLY},
9 10 ast,
10 11 algo::visit::{visitor_ctx, VisitorCtx},
11 12 };
@@ -14,12 +15,40 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
14 15 let mut errors = Vec::new();
15 16 for node in file.syntax().descendants() {
16 17 let _ = visitor_ctx(&mut errors)
17 .visit::<ast::Byte, _>(self::byte::validate_byte_node)
18 .visit::<ast::ByteString, _>(self::byte_string::validate_byte_string_node)
19 .visit::<ast::Char, _>(self::char::validate_char_node)
20 .visit::<ast::String, _>(self::string::validate_string_node)
21 .visit::<ast::Block, _>(self::block::validate_block_node)
18 .visit::<ast::Byte, _>(byte::validate_byte_node)
19 .visit::<ast::ByteString, _>(byte_string::validate_byte_string_node)
20 .visit::<ast::Char, _>(char::validate_char_node)
21 .visit::<ast::String, _>(string::validate_string_node)
22 .visit::<ast::Block, _>(block::validate_block_node)
22 23 .accept(node);
23 24 }
24 25 errors
25 26 }
27
28pub(crate) fn validate_block_structure(root: &SyntaxNode) {
29 let mut stack = Vec::new();
30 for node in root.descendants() {
31 match node.kind() {
32 L_CURLY => stack.push(node),
33 R_CURLY => {
34 if let Some(pair) = stack.pop() {
35 assert_eq!(
36 node.parent(),
37 pair.parent(),
38 "\nunpaired curleys:\n{}\n{}\n",
39 root.text(),
40 root.debug_dump(),
41 );
42 assert!(
43 node.next_sibling().is_none() && pair.prev_sibling().is_none(),
44 "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
45 node,
46 root.text(),
47 node.text(),
48 );
49 }
50 }
51 _ => (),
52 }
53 }
54}
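(The relocated validate_block_structure is pub(crate), but the descendants()-plus-kind() scan it relies on is public API; a small sketch with a made-up snippet, assuming ra_syntax as a dependency.)

use ra_syntax::{AstNode, SourceFile, SyntaxKind};

fn main() {
    let file = SourceFile::parse("fn f() { let x = { 1 }; }");
    // Leaf tokens are ordinary nodes in this tree, so curly braces show up in descendants().
    let open_braces = file
        .syntax()
        .descendants()
        .filter(|node| node.kind() == SyntaxKind::L_CURLY)
        .count();
    println!("{} opening braces", open_braces);
}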
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs
index 168d0623d..458740c13 100644
--- a/crates/ra_syntax/tests/test.rs
+++ b/crates/ra_syntax/tests/test.rs
@@ -8,10 +8,7 @@ use std::{
8 8 };
9 9
10 10 use test_utils::{project_dir, dir_tests, read_text, collect_tests};
11use ra_syntax::{
12 SourceFile, AstNode,
13 utils::{check_fuzz_invariants, dump_tree},
14};
11use ra_syntax::{SourceFile, AstNode, check_fuzz_invariants};
15 12
16 13 #[test]
17 14 fn lexer_tests() {
@@ -32,7 +29,7 @@ fn parser_tests() {
32 29 "There should be no errors in the file {:?}",
33 30 path.display()
34 31 );
35 dump_tree(file.syntax())
32 file.syntax().debug_dump()
36 33 });
37 34 dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
38 35 let file = SourceFile::parse(text);
@@ -43,7 +40,7 @@ fn parser_tests() {
43 40 "There should be errors in the file {:?}",
44 41 path.display()
45 42 );
46 dump_tree(file.syntax())
43 file.syntax().debug_dump()
47 44 });
48 45 }
49 46
diff --git a/crates/tools/src/lib.rs b/crates/tools/src/lib.rs
index 0a10d2737..3c23ed76e 100644
--- a/crates/tools/src/lib.rs
+++ b/crates/tools/src/lib.rs
@@ -14,11 +14,11 @@ pub use teraron::{Mode, Overwrite, Verify};
14 14 pub type Result<T> = std::result::Result<T, failure::Error>;
15 15
16 16 pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron";
17const GRAMMAR_DIR: &str = "crates/ra_syntax/src/parsing/grammar";
17const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar";
18 18 const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/tests/data/parser/inline/ok";
19 19 const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/tests/data/parser/inline/err";
20 20
21pub const SYNTAX_KINDS: &str = "crates/ra_syntax/src/syntax_kinds/generated.rs.tera";
21pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs.tera";
22 22 pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs.tera";
23 23 const TOOLCHAIN: &str = "stable";
24 24