about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--crates/ra_mbe/src/subtree_parser.rs4
-rw-r--r--crates/ra_mbe/src/subtree_source.rs59
-rw-r--r--crates/ra_mbe/src/syntax_bridge.rs63
-rw-r--r--crates/ra_parser/src/lib.rs54
-rw-r--r--crates/ra_parser/src/parser.rs57
-rw-r--r--crates/ra_syntax/src/parsing.rs4
-rw-r--r--crates/ra_syntax/src/parsing/reparsing.rs4
-rw-r--r--crates/ra_syntax/src/parsing/text_token_source.rs47
-rw-r--r--docs/user/README.md19
9 files changed, 185 insertions, 126 deletions
diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs
index 709b87a38..9cc989b23 100644
--- a/crates/ra_mbe/src/subtree_parser.rs
+++ b/crates/ra_mbe/src/subtree_parser.rs
@@ -68,13 +68,13 @@ impl<'a> Parser<'a> {
68 68
69 fn parse<F>(self, f: F) -> Option<tt::TokenTree> 69 fn parse<F>(self, f: F) -> Option<tt::TokenTree>
70 where 70 where
71 F: FnOnce(&dyn TokenSource, &mut dyn TreeSink), 71 F: FnOnce(&mut dyn TokenSource, &mut dyn TreeSink),
72 { 72 {
73 let buffer = TokenBuffer::new(&self.subtree.token_trees[*self.cur_pos..]); 73 let buffer = TokenBuffer::new(&self.subtree.token_trees[*self.cur_pos..]);
74 let mut src = SubtreeTokenSource::new(&buffer); 74 let mut src = SubtreeTokenSource::new(&buffer);
75 let mut sink = OffsetTokenSink { token_pos: 0, error: false }; 75 let mut sink = OffsetTokenSink { token_pos: 0, error: false };
76 76
77 f(&src, &mut sink); 77 f(&mut src, &mut sink);
78 78
79 let r = self.finish(sink.token_pos, &mut src); 79 let r = self.finish(sink.token_pos, &mut src);
80 if sink.error { 80 if sink.error {
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 972af4a7c..c4f79f38a 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -1,6 +1,7 @@
1use ra_parser::{TokenSource}; 1use ra_parser::{TokenSource, Token};
2use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; 2use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T};
3use std::cell::{RefCell, Cell}; 3use std::cell::{RefCell, Cell};
4use std::sync::Arc;
4use tt::buffer::{TokenBuffer, Cursor}; 5use tt::buffer::{TokenBuffer, Cursor};
5 6
6pub(crate) trait Querier { 7pub(crate) trait Querier {
@@ -65,7 +66,7 @@ impl<'a> SubtreeWalk<'a> {
65 return cached[pos].clone(); 66 return cached[pos].clone();
66 } 67 }
67 68
68 fn collect_token_trees(&mut self, n: usize) -> Vec<tt::TokenTree> { 69 fn collect_token_trees(&self, n: usize) -> Vec<tt::TokenTree> {
69 let mut res = vec![]; 70 let mut res = vec![];
70 71
71 let mut pos = 0; 72 let mut pos = 0;
@@ -117,43 +118,59 @@ impl<'a> Querier for SubtreeWalk<'a> {
117} 118}
118 119
119pub(crate) struct SubtreeTokenSource<'a> { 120pub(crate) struct SubtreeTokenSource<'a> {
120 walker: SubtreeWalk<'a>, 121 walker: Arc<SubtreeWalk<'a>>,
122 curr: (Token, usize),
121} 123}
122 124
123impl<'a> SubtreeTokenSource<'a> { 125impl<'a> SubtreeTokenSource<'a> {
124 pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { 126 pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
125 SubtreeTokenSource { walker: SubtreeWalk::new(buffer.begin()) } 127 let mut res = SubtreeTokenSource {
128 walker: Arc::new(SubtreeWalk::new(buffer.begin())),
129 curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
130 };
131 res.curr = (res.mk_token(0), 0);
132 res
126 } 133 }
127 134
128 pub fn querier<'b>(&'a self) -> &'b SubtreeWalk<'a> 135 pub fn querier(&self) -> Arc<SubtreeWalk<'a>> {
129 where 136 self.walker.clone()
130 'a: 'b,
131 {
132 &self.walker
133 } 137 }
134 138
135 pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> { 139 pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> {
136 let res = self.walker.collect_token_trees(parsed_tokens); 140 let res = self.walker.collect_token_trees(parsed_tokens);
137 res 141 res
138 } 142 }
143
144 fn mk_token(&self, pos: usize) -> Token {
145 match self.walker.get(pos) {
146 Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
147 None => Token { kind: EOF, is_jointed_to_next: false },
148 }
149 }
139} 150}
140 151
141impl<'a> TokenSource for SubtreeTokenSource<'a> { 152impl<'a> TokenSource for SubtreeTokenSource<'a> {
142 fn token_kind(&self, pos: usize) -> SyntaxKind { 153 fn current(&self) -> Token {
143 if let Some(tok) = self.walker.get(pos) { 154 self.curr.0
144 tok.kind
145 } else {
146 SyntaxKind::EOF
147 }
148 } 155 }
149 fn is_token_joint_to_next(&self, pos: usize) -> bool { 156
150 match self.walker.get(pos) { 157 /// Lookahead n token
151 Some(t) => t.is_joint_to_next, 158 fn lookahead_nth(&self, n: usize) -> Token {
152 _ => false, 159 self.mk_token(self.curr.1 + n)
160 }
161
162 /// bump cursor to next token
163 fn bump(&mut self) {
164 if self.current().kind == EOF {
165 return;
153 } 166 }
167
168 self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1)
154 } 169 }
155 fn is_keyword(&self, pos: usize, kw: &str) -> bool { 170
156 match self.walker.get(pos) { 171 /// Is the current token a specified keyword?
172 fn is_keyword(&self, kw: &str) -> bool {
173 match self.walker.get(self.curr.1) {
157 Some(t) => t.text == *kw, 174 Some(t) => t.text == *kw,
158 _ => false, 175 _ => false,
159 } 176 }
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 0a75305b4..0a7e50c4e 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -45,17 +45,25 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
45// 45//
46// 46//
47 47
48/// Parses the token tree (result of macro expansion) to an expression 48fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<TreeArc<SyntaxNode>, ExpandError>
49pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<TreeArc<ast::Expr>, ExpandError> { 49where
50 F: Fn(&mut ra_parser::TokenSource, &mut ra_parser::TreeSink),
51{
50 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 52 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]);
51 let token_source = SubtreeTokenSource::new(&buffer); 53 let mut token_source = SubtreeTokenSource::new(&buffer);
52 let mut tree_sink = TtTreeSink::new(token_source.querier()); 54 let querier = token_source.querier();
53 ra_parser::parse_expr(&token_source, &mut tree_sink); 55 let mut tree_sink = TtTreeSink::new(querier.as_ref());
56 f(&mut token_source, &mut tree_sink);
54 if tree_sink.roots.len() != 1 { 57 if tree_sink.roots.len() != 1 {
55 return Err(ExpandError::ConversionError); 58 return Err(ExpandError::ConversionError);
56 } 59 }
57 60
58 let syntax = tree_sink.inner.finish(); 61 Ok(tree_sink.inner.finish())
62}
63
64/// Parses the token tree (result of macro expansion) to an expression
65pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<TreeArc<ast::Expr>, ExpandError> {
66 let syntax = token_tree_to_syntax_node(tt, ra_parser::parse_expr)?;
59 ast::Expr::cast(&syntax) 67 ast::Expr::cast(&syntax)
60 .map(|m| m.to_owned()) 68 .map(|m| m.to_owned())
61 .ok_or_else(|| crate::ExpandError::ConversionError) 69 .ok_or_else(|| crate::ExpandError::ConversionError)
@@ -63,28 +71,13 @@ pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<TreeArc<ast::Expr>, Expand
63 71
64/// Parses the token tree (result of macro expansion) to a Pattern 72/// Parses the token tree (result of macro expansion) to a Pattern
65pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<TreeArc<ast::Pat>, ExpandError> { 73pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<TreeArc<ast::Pat>, ExpandError> {
66 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 74 let syntax = token_tree_to_syntax_node(tt, ra_parser::parse_pat)?;
67 let token_source = SubtreeTokenSource::new(&buffer);
68 let mut tree_sink = TtTreeSink::new(token_source.querier());
69 ra_parser::parse_pat(&token_source, &mut tree_sink);
70 if tree_sink.roots.len() != 1 {
71 return Err(ExpandError::ConversionError);
72 }
73
74 let syntax = tree_sink.inner.finish();
75 ast::Pat::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError) 75 ast::Pat::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
76} 76}
77 77
78/// Parses the token tree (result of macro expansion) to a Type 78/// Parses the token tree (result of macro expansion) to a Type
79pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<TreeArc<ast::TypeRef>, ExpandError> { 79pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<TreeArc<ast::TypeRef>, ExpandError> {
80 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 80 let syntax = token_tree_to_syntax_node(tt, ra_parser::parse_ty)?;
81 let token_source = SubtreeTokenSource::new(&buffer);
82 let mut tree_sink = TtTreeSink::new(token_source.querier());
83 ra_parser::parse_ty(&token_source, &mut tree_sink);
84 if tree_sink.roots.len() != 1 {
85 return Err(ExpandError::ConversionError);
86 }
87 let syntax = tree_sink.inner.finish();
88 ast::TypeRef::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError) 81 ast::TypeRef::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
89} 82}
90 83
@@ -92,14 +85,7 @@ pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<TreeArc<ast::TypeRef>, Expan
92pub fn token_tree_to_macro_stmts( 85pub fn token_tree_to_macro_stmts(
93 tt: &tt::Subtree, 86 tt: &tt::Subtree,
94) -> Result<TreeArc<ast::MacroStmts>, ExpandError> { 87) -> Result<TreeArc<ast::MacroStmts>, ExpandError> {
95 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 88 let syntax = token_tree_to_syntax_node(tt, ra_parser::parse_macro_stmts)?;
96 let token_source = SubtreeTokenSource::new(&buffer);
97 let mut tree_sink = TtTreeSink::new(token_source.querier());
98 ra_parser::parse_macro_stmts(&token_source, &mut tree_sink);
99 if tree_sink.roots.len() != 1 {
100 return Err(ExpandError::ConversionError);
101 }
102 let syntax = tree_sink.inner.finish();
103 ast::MacroStmts::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError) 89 ast::MacroStmts::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
104} 90}
105 91
@@ -107,24 +93,13 @@ pub fn token_tree_to_macro_stmts(
107pub fn token_tree_to_macro_items( 93pub fn token_tree_to_macro_items(
108 tt: &tt::Subtree, 94 tt: &tt::Subtree,
109) -> Result<TreeArc<ast::MacroItems>, ExpandError> { 95) -> Result<TreeArc<ast::MacroItems>, ExpandError> {
110 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 96 let syntax = token_tree_to_syntax_node(tt, ra_parser::parse_macro_items)?;
111 let token_source = SubtreeTokenSource::new(&buffer);
112 let mut tree_sink = TtTreeSink::new(token_source.querier());
113 ra_parser::parse_macro_items(&token_source, &mut tree_sink);
114 if tree_sink.roots.len() != 1 {
115 return Err(ExpandError::ConversionError);
116 }
117 let syntax = tree_sink.inner.finish();
118 ast::MacroItems::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError) 97 ast::MacroItems::cast(&syntax).map(|m| m.to_owned()).ok_or_else(|| ExpandError::ConversionError)
119} 98}
120 99
121/// Parses the token tree (result of macro expansion) as a sequence of items 100/// Parses the token tree (result of macro expansion) as a sequence of items
122pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> { 101pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> {
123 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 102 let syntax = token_tree_to_syntax_node(tt, ra_parser::parse).unwrap();
124 let token_source = SubtreeTokenSource::new(&buffer);
125 let mut tree_sink = TtTreeSink::new(token_source.querier());
126 ra_parser::parse(&token_source, &mut tree_sink);
127 let syntax = tree_sink.inner.finish();
128 ast::SourceFile::cast(&syntax).unwrap().to_owned() 103 ast::SourceFile::cast(&syntax).unwrap().to_owned()
129} 104}
130 105
diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs
index 697d1b794..3d88be642 100644
--- a/crates/ra_parser/src/lib.rs
+++ b/crates/ra_parser/src/lib.rs
@@ -31,12 +31,26 @@ pub struct ParseError(pub String);
31/// 31///
32/// Hopefully this will allow us to treat text and token trees in the same way! 32/// Hopefully this will allow us to treat text and token trees in the same way!
33pub trait TokenSource { 33pub trait TokenSource {
34 fn current(&self) -> Token;
35
36 /// Lookahead n token
37 fn lookahead_nth(&self, n: usize) -> Token;
38
39 /// bump cursor to next token
40 fn bump(&mut self);
41
42 /// Is the current token a specified keyword?
43 fn is_keyword(&self, kw: &str) -> bool;
44}
45
46/// `TokenCursor` abstracts the cursor of `TokenSource` operates one.
47#[derive(Debug, Copy, Clone, Eq, PartialEq)]
48pub struct Token {
34 /// What is the current token? 49 /// What is the current token?
35 fn token_kind(&self, pos: usize) -> SyntaxKind; 50 pub kind: SyntaxKind,
51
36 /// Is the current token joined to the next one (`> >` vs `>>`). 52 /// Is the current token joined to the next one (`> >` vs `>>`).
37 fn is_token_joint_to_next(&self, pos: usize) -> bool; 53 pub is_jointed_to_next: bool,
38 /// Is the current token a specified keyword?
39 fn is_keyword(&self, pos: usize, kw: &str) -> bool;
40} 54}
41 55
42/// `TreeSink` abstracts details of a particular syntax tree implementation. 56/// `TreeSink` abstracts details of a particular syntax tree implementation.
@@ -54,7 +68,7 @@ pub trait TreeSink {
54 fn error(&mut self, error: ParseError); 68 fn error(&mut self, error: ParseError);
55} 69}
56 70
57fn parse_from_tokens<F>(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F) 71fn parse_from_tokens<F>(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
58where 72where
59 F: FnOnce(&mut parser::Parser), 73 F: FnOnce(&mut parser::Parser),
60{ 74{
@@ -65,61 +79,65 @@ where
65} 79}
66 80
67/// Parse given tokens into the given sink as a rust file. 81/// Parse given tokens into the given sink as a rust file.
68pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 82pub fn parse(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
69 parse_from_tokens(token_source, tree_sink, grammar::root); 83 parse_from_tokens(token_source, tree_sink, grammar::root);
70} 84}
71 85
72/// Parse given tokens into the given sink as a path 86/// Parse given tokens into the given sink as a path
73pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 87pub fn parse_path(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
74 parse_from_tokens(token_source, tree_sink, grammar::path); 88 parse_from_tokens(token_source, tree_sink, grammar::path);
75} 89}
76 90
77/// Parse given tokens into the given sink as an expression 91/// Parse given tokens into the given sink as an expression
78pub fn parse_expr(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 92pub fn parse_expr(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
79 parse_from_tokens(token_source, tree_sink, grammar::expr); 93 parse_from_tokens(token_source, tree_sink, grammar::expr);
80} 94}
81 95
82/// Parse given tokens into the given sink as a ty 96/// Parse given tokens into the given sink as a ty
83pub fn parse_ty(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 97pub fn parse_ty(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
84 parse_from_tokens(token_source, tree_sink, grammar::type_); 98 parse_from_tokens(token_source, tree_sink, grammar::type_);
85} 99}
86 100
87/// Parse given tokens into the given sink as a pattern 101/// Parse given tokens into the given sink as a pattern
88pub fn parse_pat(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 102pub fn parse_pat(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
89 parse_from_tokens(token_source, tree_sink, grammar::pattern); 103 parse_from_tokens(token_source, tree_sink, grammar::pattern);
90} 104}
91 105
92/// Parse given tokens into the given sink as a statement 106/// Parse given tokens into the given sink as a statement
93pub fn parse_stmt(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, with_semi: bool) { 107pub fn parse_stmt(
108 token_source: &mut dyn TokenSource,
109 tree_sink: &mut dyn TreeSink,
110 with_semi: bool,
111) {
94 parse_from_tokens(token_source, tree_sink, |p| grammar::stmt(p, with_semi)); 112 parse_from_tokens(token_source, tree_sink, |p| grammar::stmt(p, with_semi));
95} 113}
96 114
97/// Parse given tokens into the given sink as a block 115/// Parse given tokens into the given sink as a block
98pub fn parse_block(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 116pub fn parse_block(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
99 parse_from_tokens(token_source, tree_sink, grammar::block); 117 parse_from_tokens(token_source, tree_sink, grammar::block);
100} 118}
101 119
102pub fn parse_meta(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 120pub fn parse_meta(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
103 parse_from_tokens(token_source, tree_sink, grammar::meta_item); 121 parse_from_tokens(token_source, tree_sink, grammar::meta_item);
104} 122}
105 123
106/// Parse given tokens into the given sink as an item 124/// Parse given tokens into the given sink as an item
107pub fn parse_item(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 125pub fn parse_item(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
108 parse_from_tokens(token_source, tree_sink, grammar::item); 126 parse_from_tokens(token_source, tree_sink, grammar::item);
109} 127}
110 128
111/// Parse given tokens into the given sink as a visibility qualifier 129/// Parse given tokens into the given sink as a visibility qualifier
112pub fn parse_vis(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 130pub fn parse_vis(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
113 parse_from_tokens(token_source, tree_sink, |p| { 131 parse_from_tokens(token_source, tree_sink, |p| {
114 grammar::opt_visibility(p); 132 grammar::opt_visibility(p);
115 }); 133 });
116} 134}
117 135
118pub fn parse_macro_items(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 136pub fn parse_macro_items(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
119 parse_from_tokens(token_source, tree_sink, grammar::macro_items); 137 parse_from_tokens(token_source, tree_sink, grammar::macro_items);
120} 138}
121 139
122pub fn parse_macro_stmts(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 140pub fn parse_macro_stmts(token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
123 parse_from_tokens(token_source, tree_sink, grammar::macro_stmts); 141 parse_from_tokens(token_source, tree_sink, grammar::macro_stmts);
124} 142}
125 143
@@ -140,7 +158,7 @@ impl Reparser {
140 /// 158 ///
141 /// Tokens must start with `{`, end with `}` and form a valid brace 159 /// Tokens must start with `{`, end with `}` and form a valid brace
142 /// sequence. 160 /// sequence.
143 pub fn parse(self, token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { 161 pub fn parse(self, token_source: &mut dyn TokenSource, tree_sink: &mut dyn TreeSink) {
144 let Reparser(r) = self; 162 let Reparser(r) = self;
145 let mut p = parser::Parser::new(token_source); 163 let mut p = parser::Parser::new(token_source);
146 r(&mut p); 164 r(&mut p);
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs
index 4434dfb09..8f654f04c 100644
--- a/crates/ra_parser/src/parser.rs
+++ b/crates/ra_parser/src/parser.rs
@@ -19,15 +19,14 @@ use crate::{
19/// "start expression, consume number literal, 19/// "start expression, consume number literal,
20/// finish expression". See `Event` docs for more. 20/// finish expression". See `Event` docs for more.
21pub(crate) struct Parser<'t> { 21pub(crate) struct Parser<'t> {
22 token_source: &'t dyn TokenSource, 22 token_source: &'t mut dyn TokenSource,
23 token_pos: usize,
24 events: Vec<Event>, 23 events: Vec<Event>,
25 steps: Cell<u32>, 24 steps: Cell<u32>,
26} 25}
27 26
28impl<'t> Parser<'t> { 27impl<'t> Parser<'t> {
29 pub(super) fn new(token_source: &'t dyn TokenSource) -> Parser<'t> { 28 pub(super) fn new(token_source: &'t mut dyn TokenSource) -> Parser<'t> {
30 Parser { token_source, token_pos: 0, events: Vec::new(), steps: Cell::new(0) } 29 Parser { token_source, events: Vec::new(), steps: Cell::new(0) }
31 } 30 }
32 31
33 pub(crate) fn finish(self) -> Vec<Event> { 32 pub(crate) fn finish(self) -> Vec<Event> {
@@ -49,7 +48,7 @@ impl<'t> Parser<'t> {
49 let c1 = self.nth(0); 48 let c1 = self.nth(0);
50 let c2 = self.nth(1); 49 let c2 = self.nth(1);
51 50
52 if self.token_source.is_token_joint_to_next(self.token_pos) { 51 if self.token_source.current().is_jointed_to_next {
53 Some((c1, c2)) 52 Some((c1, c2))
54 } else { 53 } else {
55 None 54 None
@@ -64,8 +63,8 @@ impl<'t> Parser<'t> {
64 let c1 = self.nth(0); 63 let c1 = self.nth(0);
65 let c2 = self.nth(1); 64 let c2 = self.nth(1);
66 let c3 = self.nth(2); 65 let c3 = self.nth(2);
67 if self.token_source.is_token_joint_to_next(self.token_pos) 66 if self.token_source.current().is_jointed_to_next
68 && self.token_source.is_token_joint_to_next(self.token_pos + 1) 67 && self.token_source.lookahead_nth(1).is_jointed_to_next
69 { 68 {
70 Some((c1, c2, c3)) 69 Some((c1, c2, c3))
71 } else { 70 } else {
@@ -76,6 +75,8 @@ impl<'t> Parser<'t> {
76 /// Lookahead operation: returns the kind of the next nth 75 /// Lookahead operation: returns the kind of the next nth
77 /// token. 76 /// token.
78 pub(crate) fn nth(&self, n: usize) -> SyntaxKind { 77 pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
78 assert!(n <= 3);
79
79 let steps = self.steps.get(); 80 let steps = self.steps.get();
80 assert!(steps <= 10_000_000, "the parser seems stuck"); 81 assert!(steps <= 10_000_000, "the parser seems stuck");
81 self.steps.set(steps + 1); 82 self.steps.set(steps + 1);
@@ -86,7 +87,7 @@ impl<'t> Parser<'t> {
86 let mut i = 0; 87 let mut i = 0;
87 88
88 loop { 89 loop {
89 let mut kind = self.token_source.token_kind(self.token_pos + i); 90 let mut kind = self.token_source.lookahead_nth(i).kind;
90 if let Some((composited, step)) = self.is_composite(kind, i) { 91 if let Some((composited, step)) = self.is_composite(kind, i) {
91 kind = composited; 92 kind = composited;
92 i += step; 93 i += step;
@@ -115,7 +116,7 @@ impl<'t> Parser<'t> {
115 116
116 /// Checks if the current token is contextual keyword with text `t`. 117 /// Checks if the current token is contextual keyword with text `t`.
117 pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool { 118 pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool {
118 self.token_source.is_keyword(self.token_pos, kw) 119 self.token_source.is_keyword(kw)
119 } 120 }
120 121
121 /// Starts a new node in the syntax tree. All nodes and tokens 122 /// Starts a new node in the syntax tree. All nodes and tokens
@@ -130,12 +131,12 @@ impl<'t> Parser<'t> {
130 /// Advances the parser by one token unconditionally 131 /// Advances the parser by one token unconditionally
131 /// Mainly use in `token_tree` parsing 132 /// Mainly use in `token_tree` parsing
132 pub(crate) fn bump_raw(&mut self) { 133 pub(crate) fn bump_raw(&mut self) {
133 let mut kind = self.token_source.token_kind(self.token_pos); 134 let mut kind = self.token_source.current().kind;
134 135
135 // Skip dollars, do_bump will eat these later 136 // Skip dollars, do_bump will eat these later
136 let mut i = 0; 137 let mut i = 0;
137 while kind == SyntaxKind::L_DOLLAR || kind == SyntaxKind::R_DOLLAR { 138 while kind == SyntaxKind::L_DOLLAR || kind == SyntaxKind::R_DOLLAR {
138 kind = self.token_source.token_kind(self.token_pos + i); 139 kind = self.token_source.lookahead_nth(i).kind;
139 i += 1; 140 i += 1;
140 } 141 }
141 142
@@ -236,7 +237,11 @@ impl<'t> Parser<'t> {
236 237
237 fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { 238 fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
238 self.eat_dollars(); 239 self.eat_dollars();
239 self.token_pos += usize::from(n_raw_tokens); 240
241 for _ in 0..n_raw_tokens {
242 self.token_source.bump();
243 }
244
240 self.push_event(Event::Token { kind, n_raw_tokens }); 245 self.push_event(Event::Token { kind, n_raw_tokens });
241 } 246 }
242 247
@@ -249,10 +254,14 @@ impl<'t> Parser<'t> {
249 // We assume the dollars will not occur between 254 // We assume the dollars will not occur between
250 // multi-byte tokens 255 // multi-byte tokens
251 256
252 let jn1 = self.token_source.is_token_joint_to_next(self.token_pos + n); 257 let first = self.token_source.lookahead_nth(n);
253 let la2 = self.token_source.token_kind(self.token_pos + n + 1); 258 let second = self.token_source.lookahead_nth(n + 1);
254 let jn2 = self.token_source.is_token_joint_to_next(self.token_pos + n + 1); 259 let third = self.token_source.lookahead_nth(n + 2);
255 let la3 = self.token_source.token_kind(self.token_pos + n + 2); 260
261 let jn1 = first.is_jointed_to_next;
262 let la2 = second.kind;
263 let jn2 = second.is_jointed_to_next;
264 let la3 = third.kind;
256 265
257 match kind { 266 match kind {
258 T![.] if jn1 && la2 == T![.] && jn2 && la3 == T![.] => Some((T![...], 3)), 267 T![.] if jn1 && la2 == T![.] && jn2 && la3 == T![.] => Some((T![...], 3)),
@@ -271,9 +280,9 @@ impl<'t> Parser<'t> {
271 280
272 fn eat_dollars(&mut self) { 281 fn eat_dollars(&mut self) {
273 loop { 282 loop {
274 match self.token_source.token_kind(self.token_pos) { 283 match self.token_source.current().kind {
275 k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => { 284 k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => {
276 self.token_pos += 1; 285 self.token_source.bump();
277 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 }); 286 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
278 } 287 }
279 _ => { 288 _ => {
@@ -286,9 +295,9 @@ impl<'t> Parser<'t> {
286 pub(crate) fn eat_l_dollars(&mut self) -> usize { 295 pub(crate) fn eat_l_dollars(&mut self) -> usize {
287 let mut ate_count = 0; 296 let mut ate_count = 0;
288 loop { 297 loop {
289 match self.token_source.token_kind(self.token_pos) { 298 match self.token_source.current().kind {
290 k @ SyntaxKind::L_DOLLAR => { 299 k @ SyntaxKind::L_DOLLAR => {
291 self.token_pos += 1; 300 self.token_source.bump();
292 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 }); 301 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
293 ate_count += 1; 302 ate_count += 1;
294 } 303 }
@@ -302,9 +311,9 @@ impl<'t> Parser<'t> {
302 pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize { 311 pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
303 let mut ate_count = 0; 312 let mut ate_count = 0;
304 loop { 313 loop {
305 match self.token_source.token_kind(self.token_pos) { 314 match self.token_source.current().kind {
306 k @ SyntaxKind::R_DOLLAR => { 315 k @ SyntaxKind::R_DOLLAR => {
307 self.token_pos += 1; 316 self.token_source.bump();
308 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 }); 317 self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
309 ate_count += 1; 318 ate_count += 1;
310 319
@@ -320,12 +329,12 @@ impl<'t> Parser<'t> {
320 } 329 }
321 330
322 pub(crate) fn at_l_dollar(&self) -> bool { 331 pub(crate) fn at_l_dollar(&self) -> bool {
323 let kind = self.token_source.token_kind(self.token_pos); 332 let kind = self.token_source.current().kind;
324 (kind == SyntaxKind::L_DOLLAR) 333 (kind == SyntaxKind::L_DOLLAR)
325 } 334 }
326 335
327 pub(crate) fn at_r_dollar(&self) -> bool { 336 pub(crate) fn at_r_dollar(&self) -> bool {
328 let kind = self.token_source.token_kind(self.token_pos); 337 let kind = self.token_source.current().kind;
329 (kind == SyntaxKind::R_DOLLAR) 338 (kind == SyntaxKind::R_DOLLAR)
330 } 339 }
331} 340}
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs
index 15d69c5ab..4c1fa6c4f 100644
--- a/crates/ra_syntax/src/parsing.rs
+++ b/crates/ra_syntax/src/parsing.rs
@@ -17,8 +17,8 @@ pub(crate) use self::reparsing::incremental_reparse;
17 17
18pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { 18pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
19 let tokens = tokenize(&text); 19 let tokens = tokenize(&text);
20 let token_source = text_token_source::TextTokenSource::new(text, &tokens); 20 let mut token_source = text_token_source::TextTokenSource::new(text, &tokens);
21 let mut tree_sink = text_tree_sink::TextTreeSink::new(text, &tokens); 21 let mut tree_sink = text_tree_sink::TextTreeSink::new(text, &tokens);
22 ra_parser::parse(&token_source, &mut tree_sink); 22 ra_parser::parse(&mut token_source, &mut tree_sink);
23 tree_sink.finish() 23 tree_sink.finish()
24} 24}
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index 6de02a15a..3b6687f61 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -85,9 +85,9 @@ fn reparse_block<'node>(
85 if !is_balanced(&tokens) { 85 if !is_balanced(&tokens) {
86 return None; 86 return None;
87 } 87 }
88 let token_source = TextTokenSource::new(&text, &tokens); 88 let mut token_source = TextTokenSource::new(&text, &tokens);
89 let mut tree_sink = TextTreeSink::new(&text, &tokens); 89 let mut tree_sink = TextTreeSink::new(&text, &tokens);
90 reparser.parse(&token_source, &mut tree_sink); 90 reparser.parse(&mut token_source, &mut tree_sink);
91 let (green, new_errors) = tree_sink.finish(); 91 let (green, new_errors) = tree_sink.finish();
92 Some((node.replace_with(green), new_errors, node.range())) 92 Some((node.replace_with(green), new_errors, node.range()))
93} 93}
diff --git a/crates/ra_syntax/src/parsing/text_token_source.rs b/crates/ra_syntax/src/parsing/text_token_source.rs
index a6277f66f..71d2947f7 100644
--- a/crates/ra_syntax/src/parsing/text_token_source.rs
+++ b/crates/ra_syntax/src/parsing/text_token_source.rs
@@ -1,7 +1,8 @@
1use ra_parser::TokenSource; 1use ra_parser::TokenSource;
2use ra_parser::Token as PToken;
2 3
3use crate::{ 4use crate::{
4 SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit, 5 SyntaxKind::EOF, TextRange, TextUnit,
5 parsing::lexer::Token, 6 parsing::lexer::Token,
6}; 7};
7 8
@@ -23,31 +24,50 @@ pub(crate) struct TextTokenSource<'t> {
23 /// ``` 24 /// ```
24 /// tokens: `[struct, Foo, {, }]` 25 /// tokens: `[struct, Foo, {, }]`
25 tokens: Vec<Token>, 26 tokens: Vec<Token>,
27
28 /// Current token and position
29 curr: (PToken, usize),
26} 30}
27 31
28impl<'t> TokenSource for TextTokenSource<'t> { 32impl<'t> TokenSource for TextTokenSource<'t> {
29 fn token_kind(&self, pos: usize) -> SyntaxKind { 33 fn current(&self) -> PToken {
30 if !(pos < self.tokens.len()) { 34 return self.curr.0;
31 return EOF;
32 }
33 self.tokens[pos].kind
34 } 35 }
35 fn is_token_joint_to_next(&self, pos: usize) -> bool { 36
36 if !(pos + 1 < self.tokens.len()) { 37 fn lookahead_nth(&self, n: usize) -> PToken {
37 return true; 38 mk_token(self.curr.1 + n, &self.start_offsets, &self.tokens)
39 }
40
41 fn bump(&mut self) {
42 if self.curr.0.kind == EOF {
43 return;
38 } 44 }
39 self.start_offsets[pos] + self.tokens[pos].len == self.start_offsets[pos + 1] 45
46 let pos = self.curr.1 + 1;
47 self.curr = (mk_token(pos, &self.start_offsets, &self.tokens), pos);
40 } 48 }
41 fn is_keyword(&self, pos: usize, kw: &str) -> bool { 49
50 fn is_keyword(&self, kw: &str) -> bool {
51 let pos = self.curr.1;
42 if !(pos < self.tokens.len()) { 52 if !(pos < self.tokens.len()) {
43 return false; 53 return false;
44 } 54 }
45 let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len); 55 let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
46
47 self.text[range] == *kw 56 self.text[range] == *kw
48 } 57 }
49} 58}
50 59
60fn mk_token(pos: usize, start_offsets: &[TextUnit], tokens: &[Token]) -> PToken {
61 let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF);
62 let is_jointed_to_next = if pos + 1 < start_offsets.len() {
63 start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1]
64 } else {
65 false
66 };
67
68 PToken { kind, is_jointed_to_next }
69}
70
51impl<'t> TextTokenSource<'t> { 71impl<'t> TextTokenSource<'t> {
52 /// Generate input from tokens(expect comment and whitespace). 72 /// Generate input from tokens(expect comment and whitespace).
53 pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { 73 pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
@@ -62,6 +82,7 @@ impl<'t> TextTokenSource<'t> {
62 len += token.len; 82 len += token.len;
63 } 83 }
64 84
65 TextTokenSource { text, start_offsets, tokens } 85 let first = mk_token(0, &start_offsets, &tokens);
86 TextTokenSource { text, start_offsets, tokens, curr: (first, 0) }
66 } 87 }
67} 88}
diff --git a/docs/user/README.md b/docs/user/README.md
index affb96939..47fc840f7 100644
--- a/docs/user/README.md
+++ b/docs/user/README.md
@@ -89,6 +89,25 @@ to load path and require it in `init.el`
89* (Optionally) bind commands like `rust-analyzer-join-lines` or `rust-analyzer-extend-selection` to keys 89* (Optionally) bind commands like `rust-analyzer-join-lines` or `rust-analyzer-extend-selection` to keys
90 90
91 91
92## Vim and NeoVim
93
94* Install coc.nvim by following the instructions at [coc.nvim]
95* Add rust analyzer using: [coc.nvim wiki][coc-wiki]
96
97```jsonc
98 "languageserver": {
99 "rust": {
100 "command": "ra_lsp_server",
101 "filetypes": ["rust"],
102 "rootPatterns": ["Cargo.toml"]
103 }
104}
105```
106
107[coc.nvim]: https://github.com/neoclide/coc.nvim
108[coc-wiki]: https://github.com/neoclide/coc.nvim/wiki/Language-servers#rust
109
110
92## Sublime Text 3 111## Sublime Text 3
93 112
94Prerequisites: 113Prerequisites: