//! Lexing, bridging to ra_parser (which does the actual parsing) and
//! incremental reparsing.

mod lexer;
mod text_token_source;
mod text_tree_sink;
mod reparsing;

use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};

use text_token_source::TextTokenSource;
use text_tree_sink::TextTreeSink;

pub use lexer::*;

pub(crate) use self::reparsing::incremental_reparse;
use ra_parser::SyntaxKind;
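
/// Parses `text` as a whole source file: lexes it, feeds the tokens to
/// `ra_parser` via `TextTokenSource`/`TextTreeSink`, and returns the resulting
/// green tree together with all lexer and parser errors collected on the way.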
pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
    let (tokens, lexer_errors) = tokenize(&text);

    let mut token_source = TextTokenSource::new(text, &tokens);
    let mut tree_sink = TextTreeSink::new(text, &tokens);

    ra_parser::parse(&mut token_source, &mut tree_sink);

    let (tree, mut parser_errors) = tree_sink.finish();
    parser_errors.extend(lexer_errors);

    (tree, parser_errors)
}

/// Returns `text` parsed as a `T` provided there are no parse errors.
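///
/// A hypothetical use (sketch only; assumes `ast::Path` as the target node and
/// `ra_parser::FragmentKind::Path` as the fragment kind):
/// `parse_text_fragment::<ast::Path>("foo::bar", FragmentKind::Path)`.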
pub(crate) fn parse_text_fragment<T: AstNode>(
    text: &str,
    fragment_kind: ra_parser::FragmentKind,
) -> Result<T, ()> {
    let (tokens, lexer_errors) = tokenize(&text);
    if !lexer_errors.is_empty() {
        return Err(());
    }

    let mut token_source = TextTokenSource::new(text, &tokens);
    let mut tree_sink = TextTreeSink::new(text, &tokens);

    // TextTreeSink assumes that there's at least some root node to which it can
    // attach errors and tokens. We arbitrarily give it a SourceFile.
    use ra_parser::TreeSink;
    tree_sink.start_node(SyntaxKind::SOURCE_FILE);
    ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
    tree_sink.finish_node();

    let (tree, parser_errors) = tree_sink.finish();
    use ra_parser::TokenSource;
    if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
        return Err(());
    }

    SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(())
}