Diffstat (limited to 'crates/ra_syntax/src')
-rw-r--r--  crates/ra_syntax/src/algo.rs              |  1
-rw-r--r--  crates/ra_syntax/src/ast.rs               |  1
-rw-r--r--  crates/ra_syntax/src/lib.rs               | 27
-rw-r--r--  crates/ra_syntax/src/parsing.rs           | 44
-rw-r--r--  crates/ra_syntax/src/parsing/builder.rs   | 55
-rw-r--r--  crates/ra_syntax/src/parsing/input.rs     | 40
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs | 11
-rw-r--r--  crates/ra_syntax/src/syntax_node.rs       |  8
8 files changed, 104 insertions, 83 deletions
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 99b0983b0..e8cf0d4b5 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -33,6 +33,7 @@ pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode {
     SyntaxNode::from_repr(root.0.covering_node(range))
 }
 
+// Replace with `std::iter::successors` in `1.34.0`
 pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
     ::itertools::unfold(seed, move |slot| {
         slot.take().map(|curr| {
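The new comment above points at `std::iter::successors`, which stabilizes in Rust 1.34 and has exactly the semantics of `generate`: yield the seed first, then keep applying the step function until it returns `None`. A minimal sketch of the eventual replacement, assuming a 1.34+ toolchain (this is not part of the commit):

```rust
// Sketch only: what `generate` could become once the minimum supported
// toolchain reaches 1.34 and `std::iter::successors` is available.
pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
    std::iter::successors(seed, step)
}
```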
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index 62641c9fe..20e0a6856 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -1,3 +1,4 @@
+//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
 mod generated;
 
 use std::marker::PhantomData;
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index c788bddec..6982b9815 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -1,20 +1,21 @@
-//! An experimental implementation of [Rust RFC#2256 libsyntax2.0][rfc#2256].
+//! Syntax Tree library used throughout the rust analyzer.
 //!
-//! The intent is to be an IDE-ready parser, i.e. one that offers
+//! Properties:
+//!   - easy and fast incremental re-parsing
+//!   - graceful handling of errors
+//!   - full-fidelity representation (*any* text can be precisely represented as
+//!     a syntax tree)
 //!
-//! - easy and fast incremental re-parsing,
-//! - graceful handling of errors, and
-//! - maintains all information in the source file.
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
 //!
-//! For more information, see [the RFC][rfc#2265], or [the working draft][RFC.md].
+//! The most interesting modules here are `syntax_node` (which defines concrete
+//! syntax tree) and `ast` (which defines abstract syntax tree on top of the
+//! CST). The actual parser lives in a separate `ra_parser` crate, though the
+//! lexer lives in this crate.
 //!
-//! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
-//! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>
-
-#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)]
-#![deny(bad_style, missing_docs)]
-#![allow(missing_docs)]
-//#![warn(unreachable_pub)] // rust-lang/rust#47816
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
 
 mod syntax_node;
 mod syntax_text;
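The rewritten crate docs promise a full-fidelity tree: any input, however broken, parses into a tree that reproduces the original text exactly, with problems reported as errors on the side. A hedged illustration of that property; `SourceFile::parse`, `syntax()` and `errors()` are assumed here to be the crate's public entry points (they are not shown in this diff):

```rust
// Illustration under assumptions: SourceFile::parse / syntax() / errors()
// are taken to be ra_syntax's public API, which this diff does not show.
use ra_syntax::SourceFile;

fn main() {
    let text = "fn main() { let x = ; }"; // deliberately broken
    let file = SourceFile::parse(text);
    // Full fidelity: the tree covers every byte of the input...
    assert_eq!(file.syntax().text().to_string(), text);
    // ...and the syntax error is reported instead of aborting the parse.
    assert!(!file.errors().is_empty());
}
```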
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs
index 0a11600e1..cf573801c 100644
--- a/crates/ra_syntax/src/parsing.rs
+++ b/crates/ra_syntax/src/parsing.rs
@@ -1,17 +1,18 @@
-mod builder;
+//! Lexing, bridging to ra_parser (which does the actual parsing) and
+//! incremental reparsing.
+
 mod lexer;
 mod input;
+mod builder;
 mod reparsing;
 
-use ra_parser::{parse, ParseError};
-
 use crate::{
-    SyntaxKind, SyntaxError,
+    SyntaxError,
+    syntax_node::GreenNode,
     parsing::{
         builder::TreeBuilder,
         input::ParserInput,
     },
-    syntax_node::GreenNode,
 };
 
 pub use self::lexer::{tokenize, Token};
@@ -22,37 +23,6 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let tokens = tokenize(&text);
     let token_source = ParserInput::new(text, &tokens);
     let mut tree_sink = TreeBuilder::new(text, &tokens);
-    parse(&token_source, &mut tree_sink);
+    ra_parser::parse(&token_source, &mut tree_sink);
     tree_sink.finish()
 }
-
-/// `TreeSink` abstracts details of a particular syntax tree implementation.
-trait TreeSink {
-    type Tree;
-
-    /// Adds new leaf to the current branch.
-    fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8);
-
-    /// Start new branch and make it current.
-    fn start_branch(&mut self, kind: SyntaxKind, root: bool);
-
-    /// Finish current branch and restore previous
-    /// branch as current.
-    fn finish_branch(&mut self, root: bool);
-
-    fn error(&mut self, error: ParseError);
-
-    /// Complete tree building. Make sure that
-    /// `start_branch` and `finish_branch` calls
-    /// are paired!
-    fn finish(self) -> Self::Tree;
-}
-
-/// `TokenSource` abstracts the source of the tokens the parser operates on.
-///
-/// Hopefully this will allow us to treat text and token trees in the same way!
-trait TokenSource {
-    fn token_kind(&self, pos: usize) -> SyntaxKind;
-    fn is_token_joint_to_next(&self, pos: usize) -> bool;
-    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
-}
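With this commit the `TreeSink` and `TokenSource` abstractions are owned by `ra_parser`; `ra_syntax` only implements them (note that `start_branch`/`finish_branch` lose their `root` flag, and the associated `Tree` type plus trait-level `finish` are gone). Inferred from the impls later in this diff, the relocated traits presumably look roughly like this; a sketch, not `ra_parser`'s verbatim source:

```rust
// Sketch of the traits as ra_parser would now define them (SyntaxKind and
// ParseError being ra_parser's own types); inferred, not copied.

/// `TreeSink` abstracts details of a particular syntax tree implementation.
pub trait TreeSink {
    /// Adds new leaf to the current branch.
    fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8);

    /// Start new branch and make it current.
    fn start_branch(&mut self, kind: SyntaxKind);

    /// Finish current branch and restore previous branch as current.
    fn finish_branch(&mut self);

    fn error(&mut self, error: ParseError);
}

/// `TokenSource` abstracts the source of the tokens the parser operates on.
pub trait TokenSource {
    fn token_kind(&self, pos: usize) -> SyntaxKind;
    fn is_token_joint_to_next(&self, pos: usize) -> bool;
    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
}
```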
diff --git a/crates/ra_syntax/src/parsing/builder.rs b/crates/ra_syntax/src/parsing/builder.rs
index 0775b0900..cfe3139b8 100644
--- a/crates/ra_syntax/src/parsing/builder.rs
+++ b/crates/ra_syntax/src/parsing/builder.rs
@@ -1,4 +1,7 @@
+use std::mem;
+
 use ra_parser::{TreeSink, ParseError};
+use rowan::GreenNodeBuilder;
 
 use crate::{
     SmolStr, SyntaxError, SyntaxErrorKind, TextUnit, TextRange,
@@ -7,19 +10,32 @@ use crate::{
     syntax_node::{GreenNode, RaTypes},
 };
 
-use rowan::GreenNodeBuilder;
-
+/// Bridges the parser with our specific syntax tree representation.
+///
+/// `TreeBuilder` also handles attachment of trivia (whitespace) to nodes.
 pub(crate) struct TreeBuilder<'a> {
     text: &'a str,
     tokens: &'a [Token],
     text_pos: TextUnit,
     token_pos: usize,
+    state: State,
     errors: Vec<SyntaxError>,
     inner: GreenNodeBuilder<RaTypes>,
 }
 
+enum State {
+    PendingStart,
+    Normal,
+    PendingFinish,
+}
+
 impl<'a> TreeSink for TreeBuilder<'a> {
     fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingStart => unreachable!(),
+            State::PendingFinish => self.inner.finish_internal(),
+            State::Normal => (),
+        }
         self.eat_trivias();
         let n_tokens = n_tokens as usize;
         let len = self.tokens[self.token_pos..self.token_pos + n_tokens]
@@ -29,11 +45,18 @@ impl<'a> TreeSink for TreeBuilder<'a> {
         self.do_leaf(kind, len, n_tokens);
     }
 
-    fn start_branch(&mut self, kind: SyntaxKind, root: bool) {
-        if root {
-            self.inner.start_internal(kind);
-            return;
+    fn start_branch(&mut self, kind: SyntaxKind) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingStart => {
+                self.inner.start_internal(kind);
+                // No need to attach trivias to previous node: there is no
+                // previous node.
+                return;
+            }
+            State::PendingFinish => self.inner.finish_internal(),
+            State::Normal => (),
         }
+
         let n_trivias =
             self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
         let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
@@ -54,11 +77,12 @@ impl<'a> TreeSink for TreeBuilder<'a> {
         self.eat_n_trivias(n_attached_trivias);
     }
 
-    fn finish_branch(&mut self, root: bool) {
-        if root {
-            self.eat_trivias()
+    fn finish_branch(&mut self) {
+        match mem::replace(&mut self.state, State::PendingFinish) {
+            State::PendingStart => unreachable!(),
+            State::PendingFinish => self.inner.finish_internal(),
+            State::Normal => (),
         }
-        self.inner.finish_internal();
     }
 
     fn error(&mut self, error: ParseError) {
@@ -74,12 +98,21 @@ impl<'a> TreeBuilder<'a> {
             tokens,
             text_pos: 0.into(),
             token_pos: 0,
+            state: State::PendingStart,
             errors: Vec::new(),
             inner: GreenNodeBuilder::new(),
         }
     }
 
-    pub(super) fn finish(self) -> (GreenNode, Vec<SyntaxError>) {
+    pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingFinish => {
+                self.eat_trivias();
+                self.inner.finish_internal()
+            }
+            State::PendingStart | State::Normal => unreachable!(),
+        }
+
         (self.inner.finish(), self.errors)
     }
 
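The new `state` field is what lets trivia attachment work without the old `root` flags: `finish_branch` never closes a node right away, it only records `PendingFinish`, and the next event decides whether the following whitespace/comments land before or after the close. Below is a stripped-down, self-contained model of that pattern; `Model`, `token_or_node` and the log strings are invented for illustration, only the state transitions mirror `TreeBuilder`:

```rust
use std::mem;

// Illustrative model only; the state transitions mirror TreeBuilder's.
enum State {
    PendingStart,
    Normal,
    PendingFinish,
}

struct Model {
    state: State,
    log: Vec<&'static str>,
}

impl Model {
    // Plays the role of `leaf`/`start_branch`: if a close is still pending,
    // perform it now, so the trivia eaten next attaches to the new sibling.
    fn token_or_node(&mut self) {
        if let State::PendingFinish = mem::replace(&mut self.state, State::Normal) {
            self.log.push("close pending node");
        }
        self.log.push("eat trivia, emit item");
    }

    // Plays the role of `finish_branch`: only remember that a close is owed.
    fn finish_branch(&mut self) {
        self.state = State::PendingFinish;
    }

    // Plays the role of `finish`: at end of file the trailing trivia is eaten
    // while the last node is still open, so it ends up inside the tree.
    fn finish(&mut self) {
        if let State::PendingFinish = mem::replace(&mut self.state, State::Normal) {
            self.log.push("eat trailing trivia");
            self.log.push("close root");
        }
    }
}

fn main() {
    let mut m = Model { state: State::PendingStart, log: Vec::new() };
    m.token_or_node();
    m.finish_branch();
    m.finish();
    assert_eq!(m.log, ["eat trivia, emit item", "eat trailing trivia", "close root"]);
}
```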
diff --git a/crates/ra_syntax/src/parsing/input.rs b/crates/ra_syntax/src/parsing/input.rs
index 58be795bc..31c6a3b9b 100644
--- a/crates/ra_syntax/src/parsing/input.rs
+++ b/crates/ra_syntax/src/parsing/input.rs
@@ -5,6 +5,26 @@ use crate::{
     parsing::lexer::Token,
 };
 
+pub(crate) struct ParserInput<'t> {
+    text: &'t str,
+    /// start position of each token (except whitespace and comments)
+    /// ```non-rust
+    /// struct Foo;
+    /// ^------^---
+    /// |      |  ^-
+    /// 0      7  10
+    /// ```
+    /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
+    start_offsets: Vec<TextUnit>,
+    /// non-whitespace/comment tokens
+    /// ```non-rust
+    /// struct Foo {}
+    /// ^^^^^^ ^^^ ^^
+    /// ```
+    /// tokens: `[struct, Foo, {, }]`
+    tokens: Vec<Token>,
+}
+
 impl<'t> TokenSource for ParserInput<'t> {
     fn token_kind(&self, pos: usize) -> SyntaxKind {
         if !(pos < self.tokens.len()) {
@@ -28,26 +48,6 @@ impl<'t> TokenSource for ParserInput<'t> {
     }
 }
 
-pub(crate) struct ParserInput<'t> {
-    text: &'t str,
-    /// start position of each token (except whitespace and comments)
-    /// ```non-rust
-    /// struct Foo;
-    /// ^------^---
-    /// |      |  ^-
-    /// 0      7  10
-    /// ```
-    /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]`
-    start_offsets: Vec<TextUnit>,
-    /// non-whitespace/comment tokens
-    /// ```non-rust
-    /// struct Foo {}
-    /// ^^^^^^ ^^^ ^^
-    /// ```
-    /// tokens: `[struct, Foo, {, }]`
-    tokens: Vec<Token>,
-}
-
 impl<'t> ParserInput<'t> {
     /// Generate input from tokens (excluding comments and whitespace).
     pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
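The body of `ParserInput::new` is outside this hunk; a sketch of what it presumably does, consistent with the field docs above and assuming the lexer's `Token` is a `Copy` struct with `kind` and `len` fields (not verbatim from the file):

```rust
// Sketch, not the file's actual body: keep only non-trivia tokens and record
// the text offset at which each of them starts.
pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
    let mut tokens = Vec::new();
    let mut start_offsets = Vec::new();
    let mut offset: TextUnit = 0.into();
    for &token in raw_tokens {
        if !token.kind.is_trivia() {
            tokens.push(token);
            start_offsets.push(offset);
        }
        offset += token.len; // whitespace and comments still advance the offset
    }
    ParserInput { text, start_offsets, tokens }
}
```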
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index ffcb512ad..6957c26c0 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -1,11 +1,18 @@
+//! Implementation of incremental re-parsing.
+//!
+//! We use two simple strategies for this:
+//!   - if the edit modifies only a single token (like changing an identifier's
+//!     letter), we replace only this token.
+//!   - otherwise, we search for the nearest `{}` block which contains the edit
+//!     and try to parse only this block.
+
 use ra_text_edit::AtomTextEdit;
 use ra_parser::Reparser;
 
 use crate::{
-    SyntaxKind::*, TextRange, TextUnit,
+    SyntaxKind::*, TextRange, TextUnit, SyntaxError,
     algo,
     syntax_node::{GreenNode, SyntaxNode},
-    syntax_error::SyntaxError,
     parsing::{
         input::ParserInput,
         builder::TreeBuilder,
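The second strategy in the new module docs is the interesting one: find the smallest `{}` block that covers the edit and re-parse only that block with `ra_parser::Reparser`. In the real code the block is found by walking the syntax tree (via `algo`); the toy below illustrates the same "innermost covering block" idea on plain text, with all names invented for the illustration:

```rust
use std::ops::Range;

// Toy only: brace-match over text to find the innermost `{}` block that
// covers an edited byte range. The real code does this on the syntax tree.
fn covering_block(text: &str, edit: Range<usize>) -> Option<Range<usize>> {
    let mut open = Vec::new();
    let mut innermost: Option<Range<usize>> = None;
    for (i, c) in text.char_indices() {
        match c {
            '{' => open.push(i),
            '}' => {
                if let Some(start) = open.pop() {
                    let block = start..i + 1;
                    if block.start <= edit.start && edit.end <= block.end {
                        // covering blocks are nested and the innermost closes
                        // first, so the first match is the one we want
                        innermost.get_or_insert(block);
                    }
                }
            }
            _ => {}
        }
    }
    innermost
}

fn main() {
    let text = "fn f() { let x = 92; }";
    let edit = 17..19; // the user is editing `92`
    assert_eq!(covering_block(text, edit), Some(7..22)); // `{ let x = 92; }`
}
```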
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs
index aa627398d..a1bc0b499 100644
--- a/crates/ra_syntax/src/syntax_node.rs
+++ b/crates/ra_syntax/src/syntax_node.rs
@@ -1,3 +1,11 @@
+//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer.
+//!
+//! The CST includes comments and whitespace, provides a single node type,
+//! `SyntaxNode`, and a basic traversal API (parent, children, siblings).
+//!
+//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
+//! module just wraps its API.
+
 use std::{fmt, borrow::Borrow};
 
 use rowan::{Types, TransparentNewType};
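How thin that wrapper is can be seen back in `algo.rs`: `SyntaxNode::from_repr(root.0.covering_node(range))` treats `SyntaxNode` as a transparent newtype over rowan's node, with `from_repr` supplied by the imported `TransparentNewType` trait. Roughly this shape; a sketch of the pattern with approximate type parameters, not the module's verbatim contents:

```rust
// Sketch of the newtype pattern this module wraps around rowan (inferred from
// usage elsewhere in this diff, e.g. `root.0.covering_node(range)` in algo.rs;
// the exact generic parameters of rowan's node type are elided here).
#[repr(transparent)]
pub struct SyntaxNode(pub(crate) rowan::SyntaxNode<RaTypes>);
```

The `#[repr(transparent)]` layout is what makes converting a `&rowan::SyntaxNode<RaTypes>` into a `&SyntaxNode` (which is all `from_repr` does) a free cast.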